[ 518.966828] env[68233]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68233) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.967226] env[68233]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68233) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.967268] env[68233]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68233) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 518.967586] env[68233]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 519.062365] env[68233]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68233) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 519.072389] env[68233]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68233) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 519.115203] env[68233]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 519.674456] env[68233]: INFO nova.virt.driver [None req-a6e8e3d4-b6c2-4966-a903-f09039eabd13 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 519.742785] env[68233]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 519.742940] env[68233]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 519.743053] env[68233]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68233) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 522.695136] env[68233]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-5b72af7a-a449-40e9-9e34-e48eec223491 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.711242] env[68233]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68233) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 522.711397] env[68233]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-ac1379e9-ce77-48c6-9f34-c9d1f073c6b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.737059] env[68233]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d24b1.
[ 522.737188] env[68233]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.994s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 522.737825] env[68233]: INFO nova.virt.vmwareapi.driver [None req-a6e8e3d4-b6c2-4966-a903-f09039eabd13 None None] VMware vCenter version: 7.0.3
[ 522.741451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e18aac-854b-431f-9746-a604a52edd43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.758784] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f400fbc-16d7-41e0-858c-d4bc9f217957 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.764526] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec3ec75-b606-4195-9f60-3ffac24c413d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.771091] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d17f3e7-7519-40ac-b240-ca27aa13a90f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.784364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bf4101-ee65-4159-8fc9-4bccc85ae47e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.790372] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a598a92e-1521-4920-b585-62b73288a806 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.820662] env[68233]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3383265a-3767-46e8-84fa-200ad5335572 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 522.825587] env[68233]: DEBUG nova.virt.vmwareapi.driver [None req-a6e8e3d4-b6c2-4966-a903-f09039eabd13 None None] Extension org.openstack.compute already exists. {{(pid=68233) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 522.828217] env[68233]: INFO nova.compute.provider_config [None req-a6e8e3d4-b6c2-4966-a903-f09039eabd13 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 523.331466] env[68233]: DEBUG nova.context [None req-a6e8e3d4-b6c2-4966-a903-f09039eabd13 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),a91233d0-796b-4b91-9dd5-297def9244dc(cell1) {{(pid=68233) load_cells /opt/stack/nova/nova/context.py:464}}
[ 523.333576] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 523.333802] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 523.334521] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 523.334926] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Acquiring lock "a91233d0-796b-4b91-9dd5-297def9244dc" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 523.335128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Lock "a91233d0-796b-4b91-9dd5-297def9244dc" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 523.336311] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Lock "a91233d0-796b-4b91-9dd5-297def9244dc" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 523.356774] env[68233]: INFO dbcounter [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Registered counter for database nova_cell0
[ 523.365202] env[68233]: INFO dbcounter [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Registered counter for database nova_cell1
[ 523.368721] env[68233]: DEBUG oslo_db.sqlalchemy.engines [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68233) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 523.369097] env[68233]: DEBUG oslo_db.sqlalchemy.engines [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68233) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 523.373943] env[68233]: ERROR nova.db.main.api [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 523.373943] env[68233]: result = function(*args, **kwargs)
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 523.373943] env[68233]: return func(*args, **kwargs)
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 523.373943] env[68233]: result = fn(*args, **kwargs)
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 523.373943] env[68233]: return f(*args, **kwargs)
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 523.373943] env[68233]: return db.service_get_minimum_version(context, binaries)
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 523.373943] env[68233]: _check_db_access()
[ 523.373943] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 523.373943] env[68233]: stacktrace = ''.join(traceback.format_stack())
[ 523.373943] env[68233]:
[ 523.374767] env[68233]: ERROR nova.db.main.api [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 523.374767] env[68233]: result = function(*args, **kwargs)
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 523.374767] env[68233]: return func(*args, **kwargs)
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 523.374767] env[68233]: result = fn(*args, **kwargs)
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 523.374767] env[68233]: return f(*args, **kwargs)
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version
[ 523.374767] env[68233]: return db.service_get_minimum_version(context, binaries)
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 523.374767] env[68233]: _check_db_access()
[ 523.374767] env[68233]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 523.374767] env[68233]: stacktrace = ''.join(traceback.format_stack())
[ 523.374767] env[68233]:
[ 523.375162] env[68233]: WARNING nova.objects.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Failed to get minimum service version for cell a91233d0-796b-4b91-9dd5-297def9244dc
[ 523.375557] env[68233]: WARNING nova.objects.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 523.375764] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Acquiring lock "singleton_lock" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 523.375948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Acquired lock "singleton_lock" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
523.376203] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Releasing lock "singleton_lock" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 523.376532] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Full set of CONF: {{(pid=68233) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 523.376680] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ******************************************************************************** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 523.376811] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] Configuration options gathered from: {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 523.377476] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 523.377476] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 523.377476] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ================================================================================ {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 523.377756] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] allow_resize_to_same_host = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.377756] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] arq_binding_timeout = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.377853] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] backdoor_port = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.377991] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] backdoor_socket = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.378125] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] block_device_allocate_retries = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.378306] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] block_device_allocate_retries_interval = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.378560] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cert = self.pem {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.378760] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.378848] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute_monitors = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379033] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] config_dir = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379214] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] config_drive_format = iso9660 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379352] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379521] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] config_source = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379695] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] console_host = devstack {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.379864] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] control_exchange = nova {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380058] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cpu_allocation_ratio = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380277] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] daemon = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380398] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] debug = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380559] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_access_ip_network_name = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380729] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_availability_zone = nova {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.380888] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_ephemeral_format = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.381060] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_green_pool_size = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.381307] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.381474] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] default_schedule_zone = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.381639] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] disk_allocation_ratio = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.381828] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] enable_new_services = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382077] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] enabled_apis = ['osapi_compute'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382253] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] enabled_ssl_apis = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382451] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] flat_injected = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382577] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] force_config_drive = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382732] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] force_raw_images = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.382904] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] graceful_shutdown_timeout = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.383086] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] heal_instance_info_cache_interval = -1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.383371] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] host = cpu-1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.383579] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.383752] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] initial_disk_allocation_ratio = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.383915] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] initial_ram_allocation_ratio = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.384144] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.384313] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_build_timeout = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.384475] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_delete_interval = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.384644] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_format = [instance: %(uuid)s] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385119] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_name_template = instance-%08x {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385119] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_usage_audit = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385226] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_usage_audit_period = month {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385342] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385500] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] instances_path = /opt/stack/data/nova/instances {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385682] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] internal_service_availability_zone = internal {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.385843] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] key = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386013] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] live_migration_retry_count = 30 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386191] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_color = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386356] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_config_append = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386525] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386689] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_dir = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386849] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.386981] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_options = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387157] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_rotate_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387328] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_rotate_interval_type = days {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387523] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] log_rotation_type = none {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387693] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387787] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.387956] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388139] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388268] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388433] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] long_rpc_timeout = 1800 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388595] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_concurrent_builds = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388754] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_concurrent_live_migrations = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.388912] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_concurrent_snapshots = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389089] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_local_block_devices = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389254] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_logfile_count = 30 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389415] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] max_logfile_size_mb = 200 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389574] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] maximum_instance_delete_attempts = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389746] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metadata_listen = 0.0.0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.389917] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metadata_listen_port = 8775 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390099] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metadata_workers = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390267] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] migrate_max_retries = -1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390435] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] mkisofs_cmd = genisoimage {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390642] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] my_block_storage_ip = 10.180.1.21 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390777] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] my_ip = 10.180.1.21 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.390982] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.391163] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] network_allocate_retries = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.391343] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.391514] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] osapi_compute_listen = 0.0.0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.391683] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] osapi_compute_listen_port = 8774 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.391852] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] osapi_compute_unique_server_name_scope = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.392832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] osapi_compute_workers = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.392832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] password_length = 12 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.392832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] periodic_enable = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.392832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] periodic_fuzzy_delay = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.392832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] pointer_model = usbtablet {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393014] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] preallocate_images = none {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393098] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] publish_errors = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393240] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] pybasedir = /opt/stack/nova {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393414] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ram_allocation_ratio = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393595] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rate_limit_burst = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393777] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rate_limit_except_level = CRITICAL {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.393940] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rate_limit_interval = 0 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394143] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reboot_timeout = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394314] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reclaim_instance_interval = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394477] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] record = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394651] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reimage_timeout_per_gb = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394821] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] report_interval = 120 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.394998] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rescue_timeout = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.395184] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reserved_host_cpus = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.395356] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reserved_host_disk_mb = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.395535] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reserved_host_memory_mb = 512 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.395710] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] reserved_huge_pages = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.395877] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] resize_confirm_window = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396053] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] resize_fs_using_block_device = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396220] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] resume_guests_state_on_host_boot = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396391] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396556] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] rpc_response_timeout = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396721] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] run_external_periodic_tasks = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.396892] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] running_deleted_instance_action = reap {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397066] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] running_deleted_instance_poll_interval = 1800 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397230] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] running_deleted_instance_timeout = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397390] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler_instance_sync_interval = 120 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397588] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_down_time = 720 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397791] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] servicegroup_driver = db {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.397922] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] shell_completion = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.398099] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] shelved_offload_time = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.398267] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] shelved_poll_interval = 3600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.398440] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] shutdown_timeout = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.398606] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] source_is_ipv6 = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.398768] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ssl_only = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399033] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399211] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] sync_power_state_interval = 600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399377] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] sync_power_state_pool_size = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399549] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] syslog_log_facility = LOG_USER {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399712] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] tempdir = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.399874] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] timeout_nbd = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400055] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] transport_url = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400223] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] update_resources_interval = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400385] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] use_cow_images = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400549] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] use_journal = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400711] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] use_json = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.400871] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] use_rootwrap_daemon = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401041] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] 
use_stderr = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401206] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] use_syslog = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401364] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vcpu_pin_set = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401532] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plugging_is_fatal = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401702] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plugging_timeout = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.401867] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] virt_mkfs = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.402040] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] volume_usage_poll_interval = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.402207] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] watch_log_file = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.402377] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] web = /usr/share/spice-html5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 523.402562] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.402730] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.402896] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.403079] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_concurrency.disable_process_locking = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.841834] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842229] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842268] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842449] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842624] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842798] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.842989] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.auth_strategy = keystone {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.843185] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.compute_link_prefix = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.843365] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.843544] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.dhcp_domain = novalocal {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.843719] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.enable_instance_password = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.843888] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.glance_link_prefix = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844105] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844294] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844467] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.instance_list_per_project_cells = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844635] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.list_records_by_skipping_down_cells = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844801] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.local_metadata_per_cell = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.844973] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.max_limit = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.845162] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.metadata_cache_expiration = 15 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.845341] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.neutron_default_tenant_id = default {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.845516] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.response_validation = warn {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.845686] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.use_neutron_default_nets = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.845854] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846030] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846205] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846376] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846554] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_dynamic_targets = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846758] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_jsonfile_path = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.846984] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.847215] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.backend = dogpile.cache.memcached {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.847392] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.backend_argument = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.847590] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.backend_expiration_time = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.847778] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.config_prefix = cache.oslo {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.847961] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.dead_timeout = 60.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.848148] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.debug_cache_backend = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.848320] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.enable_retry_client = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.848494] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.enable_socket_keepalive = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.848666] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.enabled = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.848833] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.enforce_fips_mode = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849015] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.expiration_time = 600 
{{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849178] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.hashclient_retry_attempts = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849347] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.hashclient_retry_delay = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849514] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_dead_retry = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849676] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_password = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.849868] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850063] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850279] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_pool_maxsize = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850457] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850629] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_sasl_enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850811] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.850981] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_socket_timeout = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.851160] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.memcache_username = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.851331] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.proxies = [] {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.851496] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_db = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.851659] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_password = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.851832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_sentinel_service_name = mymaster {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852023] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852193] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_server = localhost:6379 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852363] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_socket_timeout = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852524] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.redis_username = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852690] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.retry_attempts = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.852879] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.retry_delay = 0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853072] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.socket_keepalive_count = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853242] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.socket_keepalive_idle = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853410] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.socket_keepalive_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853571] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.tls_allowed_ciphers = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853733] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.tls_cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.853893] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.tls_certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854068] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.tls_enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854231] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cache.tls_keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854404] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854581] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.auth_type = password {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854751] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.854928] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.catalog_info = volumev3::publicURL {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855105] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855277] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855445] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.cross_az_attach = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855611] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.debug = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855773] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.endpoint_template = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.855963] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.http_retries = 3 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856150] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856314] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856488] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.os_region_name = RegionOne {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856658] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856820] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cinder.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.856995] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.857172] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.cpu_dedicated_set = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.857335] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.cpu_shared_set = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.857525] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.image_type_exclude_list = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.857700] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.857929] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.max_concurrent_disk_ops = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858110] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.max_disk_devices_to_attach = -1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858281] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858456] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858623] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.resource_provider_association_refresh = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858785] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.858980] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.shutdown_retry_interval = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.859188] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.859373] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] conductor.workers = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.859555] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] console.allowed_origins = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.859720] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] console.ssl_ciphers = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.859892] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] console.ssl_minimum_version = default {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860083] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] consoleauth.enforce_session_timeout = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860261] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] consoleauth.token_ttl = 600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860438] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860600] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.certfile = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860766] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.860927] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861102] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861264] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861431] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861587] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861748] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.861934] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862113] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862279] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862437] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862612] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.service_type = accelerator {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862774] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.862936] env[68233]: DEBUG oslo_service.backend.eventlet.service 
[None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863109] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863295] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863454] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863618] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] cyborg.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863792] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.asyncio_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.863953] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.asyncio_slave_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.864141] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.backend = sqlalchemy {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.864316] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.864484] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.connection_debug = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.864656] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.connection_parameters = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.864824] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.connection_recycle_time = 3600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865028] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.connection_trace = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865204] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.db_inc_retry_interval = 
True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865373] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.db_max_retries = 20 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865541] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.db_max_retry_interval = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865707] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.db_retry_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.865874] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.max_overflow = 50 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866048] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.max_pool_size = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866217] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.max_retries = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866389] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866549] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.mysql_wsrep_sync_wait = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866711] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.pool_timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.866871] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.retry_interval = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.867096] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.slave_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.867380] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.sqlite_synchronous = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.867643] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] database.use_db_reconnect = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
523.867934] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.asyncio_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.868184] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.asyncio_slave_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.868381] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.backend = sqlalchemy {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.868563] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.868736] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.connection_debug = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.868909] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.connection_parameters = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869098] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.connection_recycle_time = 3600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869274] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.connection_trace = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869443] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.db_inc_retry_interval = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869613] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.db_max_retries = 20 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869782] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.db_max_retry_interval = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.869950] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.db_retry_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870129] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.max_overflow = 50 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870298] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.max_pool_size = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870461] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.max_retries = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870636] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870803] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.870988] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.pool_timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.871182] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.retry_interval = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.871345] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.slave_connection = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.871511] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] api_database.sqlite_synchronous = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.871693] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] devices.enabled_mdev_types = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.871874] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872116] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ephemeral_storage_encryption.default_format = luks {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872231] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ephemeral_storage_encryption.enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872398] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872572] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.api_servers = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872740] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.872903] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873082] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873249] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873414] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873578] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.debug = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873746] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.default_trusted_certificate_ids = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.873935] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.enable_certificate_validation = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874126] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.enable_rbd_download = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874293] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874464] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874631] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874796] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.max_version = None {{(pid=68233) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.874953] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875131] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.num_retries = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875305] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.rbd_ceph_conf = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875471] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.rbd_connect_timeout = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875645] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.rbd_pool = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875815] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.rbd_user = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.875985] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.876161] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.876323] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.876494] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.service_type = image {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.876659] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.876818] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877008] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877188] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877376] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877569] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.verify_glance_signatures = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877741] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] glance.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.877916] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] guestfs.debug = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878131] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878316] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.auth_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878482] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878644] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878810] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.878973] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879149] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879310] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879476] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.insecure = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879637] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879798] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.879956] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880129] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880292] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880452] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880623] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.service_type = shared-file-system {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880790] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.share_apply_policy_timeout = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.880955] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.881157] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.881335] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.881499] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.881687] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.881852] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] manila.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.882034] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] mks.enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.882431] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.882631] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.manager_interval = 2400 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.882803] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.precache_concurrency = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.882978] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.remove_unused_base_images = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.883165] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.883336] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.883524] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] image_cache.subdirectory_name = _base {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.883717] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.api_max_retries = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.883887] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.api_retry_interval = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884061] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884232] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.auth_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884396] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884558] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884729] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.884891] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.conductor_group = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885067] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885278] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885452] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885623] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885785] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.885947] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886122] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886294] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.peer_list = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886457] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886620] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.retriable_status_codes = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886786] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.serial_console_state_timeout = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.886949] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887137] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.service_type = baremetal {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887301] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.shard = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887488] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887667] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887832] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.887994] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.888242] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.888441] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ironic.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.888644] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.888819] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] key_manager.fixed_key = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889016] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889198] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.barbican_api_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889365] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.barbican_endpoint = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889541] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.barbican_endpoint_type = public {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889707] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.barbican_region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.889868] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890041] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890215] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890380] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890542] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890710] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.number_of_retries = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.890876] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.retry_delay = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891051] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.send_service_user_token = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891245] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891423] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891592] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.verify_ssl = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891754] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican.verify_ssl_path = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.891925] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892107] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.auth_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892272] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892433] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892601] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892764] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.892924] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893102] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893266] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] barbican_service_user.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893436] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.approle_role_id = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893601] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.approle_secret_id = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893775] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.kv_mountpoint = secret {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.893991] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.kv_path = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.894124] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.kv_version = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.894325] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.namespace = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.894503] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.root_token_id = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.894669] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.ssl_ca_crt_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.894839] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.timeout = 60.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895014] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.use_ssl = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895197] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895387] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895568] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895739] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.895903] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.connect_retries = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896079] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896245] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896410] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896570] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896733] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.896892] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897066] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897266] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897479] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897642] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.service_type = identity {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897814] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.897978] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.898154] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.898320] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.898516] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.898678] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] keystone.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.898873] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.ceph_mount_options = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.899223] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.899412] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.connection_uri = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.899582] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_mode = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.899759] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_model_extra_flags = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.899963] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_models = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.900163] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_power_governor_high = performance {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.900341] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_power_governor_low = powersave {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.900514] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_power_management = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.900691] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.900871] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.device_detach_attempts = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901045] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.device_detach_timeout = 20 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901222] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.disk_cachemodes = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901388] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.disk_prefix = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901556] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.enabled_perf_events = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901723] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.file_backed_memory = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.901922] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.gid_maps = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902116] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.hw_disk_discard = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902284] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.hw_machine_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902457] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_rbd_ceph_conf = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902626] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902792] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.902961] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_rbd_glance_store_name = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903147] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_rbd_pool = rbd 
{{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903319] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_type = default {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903482] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.images_volume_group = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903673] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.inject_key = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903815] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.inject_partition = -2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.903979] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.inject_password = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.907422] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.iscsi_iface = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.907645] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.iser_use_multipath = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.907828] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_bandwidth = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908038] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908242] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_downtime = 500 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908412] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908597] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908764] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_inbound_addr = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.908945] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.909140] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_permit_post_copy = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.909311] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_scheme = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.909511] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_timeout_action = abort {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.909685] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_tunnelled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.909848] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_uri = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.910025] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.live_migration_with_native_tls = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.910194] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.max_queues = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.910359] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.910708] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.910892] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.nfs_mount_options = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.911237] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.911424] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68233) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.911599] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_iser_scan_tries = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.911771] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_memory_encrypted_guests = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.911941] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.912124] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_pcie_ports = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.912299] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.num_volume_scan_tries = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.912470] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.pmem_namespaces = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.912635] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.quobyte_client_cfg = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.912928] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913119] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rbd_connect_timeout = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913290] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913457] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913623] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rbd_secret_uuid = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913787] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rbd_user = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.913999] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.914213] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.remote_filesystem_transport = ssh {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.914382] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rescue_image_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.914548] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rescue_kernel_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.914712] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rescue_ramdisk_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.914885] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.915064] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.rx_queue_size = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.915241] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.smbfs_mount_options = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.915570] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.915758] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.snapshot_compression = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.915926] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.snapshot_image_format = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.916192] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.916371] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.sparse_logical_volumes = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.916542] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.swtpm_enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.916718] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.swtpm_group = tss {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.916890] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.swtpm_user = tss {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.917163] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.sysinfo_serial = unique {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.917357] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.tb_cache_size = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.917545] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.tx_queue_size = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.917724] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.uid_maps = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.917894] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.use_virtio_for_bridges = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918082] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.virt_type = kvm {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918262] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.volume_clear = zero {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918434] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.volume_clear_size = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918600] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.volume_use_multipath = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918762] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_cache_path = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.918933] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.919117] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_mount_group = qemu {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.919308] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_mount_opts = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.919501] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.919803] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.919989] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.vzstorage_mount_user = stack {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.920178] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.920355] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.920531] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.auth_type = password {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.920697] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.920858] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921026] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921191] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921350] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921523] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.default_floating_pool = public {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921684] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.921847] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.extension_sync_interval = 600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922021] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.http_retries = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922189] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922370] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922544] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922727] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.922888] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923070] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.ovs_bridge = br-int {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923243] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.physnets = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923414] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.region_name = RegionOne {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923577] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.retriable_status_codes 
= None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923748] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.service_metadata_proxy = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.923909] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924091] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.service_type = network {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924259] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924420] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924581] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924742] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.924927] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.925093] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] neutron.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.925268] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.bdms_in_notifications = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.925478] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.default_level = INFO {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.925652] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.include_share_mapping = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.925830] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.notification_format = unversioned {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926019] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.notify_on_state_change = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926184] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926363] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] pci.alias = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926533] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] pci.device_spec = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926700] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] pci.report_in_placement = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.926873] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927059] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.auth_type = password {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927234] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927396] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927581] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927755] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.927918] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.928094] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
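Each "group.option = value" DEBUG entry above is produced by oslo.config's log_opt_values() (the oslo_config/cfg.py call named in every entry), invoked from oslo.service's service module as the nova-compute service starts, with secret options such as placement.password masked as ****. A minimal sketch of that call follows; the dump_effective_config() wrapper name is illustrative only, while CONF.log_opt_values() is the real oslo.config API these lines reference.

    # Minimal sketch: how a service emits the "group.option = value" DEBUG lines
    # seen in this log. The wrapper name is an assumption for illustration;
    # CONF.log_opt_values() is the oslo.config API cited in each log entry.
    import logging

    from oslo_config import cfg

    LOG = logging.getLogger(__name__)
    CONF = cfg.CONF

    def dump_effective_config():
        # Walks every registered option group and logs one line per option at
        # DEBUG level, masking options flagged as secret (e.g. passwords) as '****'.
        CONF.log_opt_values(LOG, logging.DEBUG)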
[ 523.928259] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.default_domain_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.928452] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.default_domain_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.928646] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.domain_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.928910] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.domain_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.929210] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.929500] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.929778] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.930048] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.930248] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.930453] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.password = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.930648] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.project_domain_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.930911] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.project_domain_name = Default {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.931130] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.project_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.931320] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None 
None] placement.project_name = service {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.931497] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.region_name = RegionOne {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.931669] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.931833] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932023] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.service_type = placement {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932194] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932360] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932525] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932690] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.system_scope = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.932850] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933064] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.trust_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933263] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.user_domain_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933440] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.user_domain_name = Default {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933607] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.user_id = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933785] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.username = nova {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.933965] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.934145] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] placement.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.934337] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.cores = 20 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.934505] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.count_usage_from_placement = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.934679] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.934849] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.injected_file_content_bytes = 10240 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935027] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.injected_file_path_length = 255 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935199] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.injected_files = 5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935364] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.instances = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935531] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.key_pairs = 100 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935696] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.metadata_items = 128 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.935862] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.ram = 51200 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936055] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.recheck_quota = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936247] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.server_group_members = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936417] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.server_groups = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936626] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936805] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] quota.unified_limits_resource_strategy = require {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.936980] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.937163] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.937327] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.image_metadata_prefilter = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.937520] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.937695] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.max_attempts = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.937861] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.max_placement_results = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938034] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938203] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.query_placement_for_image_type_support = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938365] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938550] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] scheduler.workers = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938720] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.938892] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939106] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939298] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939469] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939640] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939805] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.939995] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.940181] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.host_subset_size = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.940348] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] 
filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.940509] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.940676] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.940838] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941013] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.isolated_hosts = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941201] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.isolated_images = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941366] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941528] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941693] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.941875] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.pci_in_placement = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942067] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942238] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942401] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942563] 
env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942727] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.942891] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943064] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.track_instance_changes = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943246] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943419] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metrics.required = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943587] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metrics.weight_multiplier = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943754] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.943919] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] metrics.weight_setting = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.944263] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.944442] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.944623] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.port_range = 10000:20000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.944796] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.944993] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.945187] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] serial_console.serialproxy_port = 6083 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.945360] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.945561] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.auth_type = password {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.945735] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.945898] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946075] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946242] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946403] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946575] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.send_service_user_token = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946742] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.946993] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] service_user.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.947084] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.agent_enabled = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.947252] 
env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.947587] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.947799] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.947974] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.html5proxy_port = 6082 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948153] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.image_compression = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948318] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.jpeg_compression = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948481] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.playback_compression = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948645] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.require_secure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948817] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.server_listen = 127.0.0.1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.948988] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.949283] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.949460] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.streaming_mode = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.949627] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] spice.zlib_compression = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.949798] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] upgrade_levels.baseapi = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.949973] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] upgrade_levels.compute = auto {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950159] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] upgrade_levels.conductor = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950319] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] upgrade_levels.scheduler = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950488] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950657] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.auth_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950816] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.950975] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951151] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951317] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951477] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951642] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951802] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vendordata_dynamic_auth.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.951976] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.api_retry_count = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.952153] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.ca_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.952329] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.cache_prefix = devstack-image-cache {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.952500] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.cluster_name = testcl1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.952670] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.connection_pool_size = 10 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.952831] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.console_delay_seconds = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953008] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.datastore_regex = ^datastore.* {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953227] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953404] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.host_password = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953575] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.host_port = 443 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953751] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.host_username = administrator@vsphere.local {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.953920] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.insecure = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954097] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.integration_bridge = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954266] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None 
None] vmware.maximum_objects = 100 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954426] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.pbm_default_policy = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954589] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.pbm_enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954752] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.pbm_wsdl_location = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.954921] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955092] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.serial_port_proxy_uri = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955255] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.serial_port_service_uri = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955424] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.task_poll_interval = 0.5 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955621] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.use_linked_clone = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955798] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.vnc_keymap = en-us {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.955967] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.vnc_port = 5900 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.956151] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vmware.vnc_port_total = 10000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.956340] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.auth_schemes = ['none'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.956518] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.enabled = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.956802] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.956987] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.957177] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.novncproxy_port = 6080 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.957364] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.server_listen = 127.0.0.1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.957573] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.957746] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.vencrypt_ca_certs = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.957960] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.vencrypt_client_cert = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.958144] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vnc.vencrypt_client_key = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.958329] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.958509] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_deep_image_inspection = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.958684] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.958850] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959022] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_libvirt_livesnapshot = False 
{{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959224] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.disable_rootwrap = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959443] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.enable_numa_live_migration = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959621] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959788] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.959953] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960134] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.libvirt_disable_apic = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960303] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960469] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960632] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960792] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.960954] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961130] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961293] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961454] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961616] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961787] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.961966] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962154] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.client_socket_timeout = 900 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962322] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.default_pool_size = 1000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962493] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.keep_alive = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962664] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.max_header_line = 16384 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962828] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.secure_proxy_ssl_header = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.962990] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.ssl_ca_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.963168] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.ssl_cert_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.963331] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.ssl_key_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.963498] env[68233]: DEBUG 
oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.tcp_keepidle = 600 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.963674] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.963842] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] zvm.ca_file = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.964016] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] zvm.cloud_connector_url = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.964441] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.964630] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] zvm.reachable_timeout = 300 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.964812] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.964994] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.965196] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.connection_string = messaging:// {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.965375] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.enabled = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.965585] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.es_doc_type = notification {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.965768] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.es_scroll_size = 10000 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.965947] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.es_scroll_time = 2m {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.966131] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.filter_error_trace = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.966309] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.hmac_keys = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.966519] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.sentinel_service_name = mymaster {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.966751] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.socket_timeout = 0.1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.966931] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.trace_requests = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.967118] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler.trace_sqlalchemy = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.967309] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler_jaeger.process_tags = {} {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.967505] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler_jaeger.service_name_prefix = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.967678] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] profiler_otlp.service_name_prefix = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.967850] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] remote_debug.host = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968025] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] remote_debug.port = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968211] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968379] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968546] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968714] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.968881] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969058] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969229] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969394] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969560] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969734] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.969897] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970081] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970257] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970428] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970596] env[68233]: DEBUG oslo_service.backend.eventlet.service [None 
req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970794] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.970935] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971114] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971289] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971453] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971621] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971788] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.971952] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972129] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972296] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972462] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972626] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972791] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.972954] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973133] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973301] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973473] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973645] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973809] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.973976] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.974160] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.ssl_version = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.974335] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.974515] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.974683] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_notifications.retry = -1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.974861] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] 
oslo_messaging_notifications.topics = ['notifications'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975045] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_messaging_notifications.transport_url = **** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975230] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.auth_section = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975398] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.auth_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975560] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.cafile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975720] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.certfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.975883] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.collect_timing = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976058] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.connect_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976225] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.connect_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976387] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_id = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976564] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_interface = publicURL {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976728] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_override = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.976889] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977069] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_service_name = None {{(pid=68233) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977235] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.endpoint_service_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977403] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.insecure = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977571] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.keyfile = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977734] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.max_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.977896] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.min_version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978069] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.region_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978233] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.retriable_status_codes = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978391] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.service_name = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978575] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.service_type = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978748] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.split_loggers = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.978910] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.status_code_retries = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.979101] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.status_code_retry_delay = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.979248] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.timeout = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
523.979412] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.valid_interfaces = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.979573] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_limit.version = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.979743] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_reports.file_event_handler = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.979910] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.980084] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] oslo_reports.log_dir = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.980262] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.980426] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.980632] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.980841] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981026] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981196] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981371] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981536] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.group = None {{(pid=68233) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981702] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.981870] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982047] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982244] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] vif_plug_ovs_privileged.user = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982384] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.flat_interface = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982569] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982751] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.982928] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.983115] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.983295] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.983467] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.983644] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.983840] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] 
os_vif_ovs.default_qos_type = linux-noop {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984026] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.isolate_vif = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984210] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984381] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984558] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984732] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.ovsdb_interface = native {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.984903] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] os_vif_ovs.per_port_bridge = False {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985087] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.capabilities = [21] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985255] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.group = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985419] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.helper_command = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985588] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985754] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.985915] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] privsep_osbrick.user = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986106] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 
12, 21] {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986269] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.group = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986427] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.helper_command = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986595] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986762] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.986922] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] nova_sys_admin.user = None {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 523.987076] env[68233]: DEBUG oslo_service.backend.eventlet.service [None req-2f89bfb5-ad29-4871-b20e-ab03e434552e None None] ******************************************************************************** {{(pid=68233) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 523.987513] env[68233]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 524.491268] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Getting list of instances from cluster (obj){ [ 524.491268] env[68233]: value = "domain-c8" [ 524.491268] env[68233]: _type = "ClusterComputeResource" [ 524.491268] env[68233]: } {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 524.492369] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd59402-2e3e-4676-9b95-c25988777a55 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.501536] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Got total of 0 instances {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 524.502108] env[68233]: WARNING nova.virt.vmwareapi.driver [None req-e601799e-9543-4a3d-bb33-988562218716 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. 
[ 524.502608] env[68233]: INFO nova.virt.node [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Generated node identity 51aa13e7-0977-4031-b209-4ae90c83752c [ 524.502859] env[68233]: INFO nova.virt.node [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Wrote node identity 51aa13e7-0977-4031-b209-4ae90c83752c to /opt/stack/data/n-cpu-1/compute_id [ 525.005420] env[68233]: WARNING nova.compute.manager [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Compute nodes ['51aa13e7-0977-4031-b209-4ae90c83752c'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 526.011652] env[68233]: INFO nova.compute.manager [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 527.017426] env[68233]: WARNING nova.compute.manager [None req-e601799e-9543-4a3d-bb33-988562218716 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 527.017855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 527.017978] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 527.018146] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 527.018302] env[68233]: DEBUG nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 527.019297] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfb0d99-ef7b-4f49-919d-fdf97abeadb6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.027811] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f25711e-8482-4f58-80c3-c19058bac134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.041092] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efe49ed-f047-4345-9a8f-82415fa4aaf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.047200] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7f558081-d238-4593-9920-12b044a40221 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 527.075804] env[68233]: DEBUG nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181107MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 527.075951] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 527.076147] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 527.578870] env[68233]: WARNING nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] No compute node record for cpu-1:51aa13e7-0977-4031-b209-4ae90c83752c: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 51aa13e7-0977-4031-b209-4ae90c83752c could not be found. [ 528.082432] env[68233]: INFO nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 51aa13e7-0977-4031-b209-4ae90c83752c [ 529.597561] env[68233]: DEBUG nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 529.597561] env[68233]: DEBUG nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 529.749835] env[68233]: INFO nova.scheduler.client.report [None req-e601799e-9543-4a3d-bb33-988562218716 None None] [req-ef2b5d0e-a4f1-467a-b68b-43dcab22abaf] Created resource provider record via placement API for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 529.766115] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f43534-4048-46dc-bf0c-195db3c4c7b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.772258] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5666d0ab-38d4-4a18-9190-50d882acd1c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.802941] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8c1a4b-363c-4c72-9b4a-a8302b1d330c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.810264] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aed6823-6579-40c8-9ab5-fb623ac9c2bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 529.823157] env[68233]: DEBUG nova.compute.provider_tree [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 530.356353] env[68233]: DEBUG nova.scheduler.client.report [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 530.356587] env[68233]: DEBUG nova.compute.provider_tree [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 0 to 1 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 530.356727] env[68233]: DEBUG nova.compute.provider_tree [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 530.404513] env[68233]: DEBUG nova.compute.provider_tree [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Updating 
resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 1 to 2 during operation: update_traits {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 530.909976] env[68233]: DEBUG nova.compute.resource_tracker [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 530.910244] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.834s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 530.910386] env[68233]: DEBUG nova.service [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Creating RPC server for service compute {{(pid=68233) start /opt/stack/nova/nova/service.py:186}} [ 530.924949] env[68233]: DEBUG nova.service [None req-e601799e-9543-4a3d-bb33-988562218716 None None] Join ServiceGroup membership for this service compute {{(pid=68233) start /opt/stack/nova/nova/service.py:203}} [ 530.925151] env[68233]: DEBUG nova.servicegroup.drivers.db [None req-e601799e-9543-4a3d-bb33-988562218716 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68233) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 537.928061] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 538.431086] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Getting list of instances from cluster (obj){ [ 538.431086] env[68233]: value = "domain-c8" [ 538.431086] env[68233]: _type = "ClusterComputeResource" [ 538.431086] env[68233]: } {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 538.432274] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa847fa-22d5-4644-a2cb-158d0275ff1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.440652] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Got total of 0 instances {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 538.440872] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 538.441184] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Getting list of instances from cluster (obj){ [ 538.441184] env[68233]: value = "domain-c8" [ 538.441184] env[68233]: _type = "ClusterComputeResource" [ 538.441184] env[68233]: } {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 538.442019] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59d34871-fd7b-4986-9e42-a9f4dc56c36e 
{{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 538.449890] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Got total of 0 instances {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 561.168962] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.168962] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 561.672452] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 562.211625] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 562.211625] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 562.214719] env[68233]: INFO nova.compute.claims [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.261434] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25353e07-c743-4530-8de6-d54d4bd26d37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.270072] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f570238-c679-4b55-8843-cb9d9f853fc1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.302193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bcadf0-fbd8-46c9-9da4-a1ab52ebb825 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.309732] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa12258b-59d8-4c7e-a1ff-1247603ed5e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.323204] env[68233]: DEBUG nova.compute.provider_tree [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.827549] env[68233]: DEBUG nova.scheduler.client.report [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 564.335500] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 564.336199] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 564.842378] env[68233]: DEBUG nova.compute.utils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 564.845356] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 564.845356] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 565.347604] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 566.364494] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 566.965180] env[68233]: DEBUG nova.policy [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd87f21bd92b4aed92c864f51af27f37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3402c00284e744118d0798626f51d676', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 567.126848] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 567.127093] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.127244] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Image limits 0:0:0 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 567.127420] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.127587] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 567.127791] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 567.127970] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 567.128931] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 567.129409] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 567.129606] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 567.129781] env[68233]: DEBUG nova.virt.hardware [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 567.130751] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a820a088-f6f7-4f91-91d2-7380077759e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.143927] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71b9043-47d7-46b8-b266-584779298ff8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.163489] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa0d198-6a3b-4905-8d00-2c764c50ce2e {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.577111] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Successfully created port: 23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.842180] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Successfully updated port: 23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 571.346578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.346578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquired lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 571.346578] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 571.939907] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 572.302547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.302547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.485326] env[68233]: DEBUG nova.network.neutron [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Updating instance_info_cache with network_info: [{"id": "23205d19-f3cb-4543-8f61-314cc465e55f", "address": "fa:16:3e:18:58:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23205d19-f3", "ovs_interfaceid": "23205d19-f3cb-4543-8f61-314cc465e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.807265] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 572.893623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "6105602a-b8eb-4128-a492-b60a9468018f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.894785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.989094] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Releasing lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.989316] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Instance network_info: |[{"id": "23205d19-f3cb-4543-8f61-314cc465e55f", "address": "fa:16:3e:18:58:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23205d19-f3", "ovs_interfaceid": "23205d19-f3cb-4543-8f61-314cc465e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 572.989776] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:58:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23205d19-f3cb-4543-8f61-314cc465e55f', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 573.003704] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 573.004310] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28da4d9a-53d3-47b7-bdb8-fceb6edcbea0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.018763] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Created folder: OpenStack in parent group-v4. [ 573.018933] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating folder: Project (3402c00284e744118d0798626f51d676). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 573.019247] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34b7859b-9bfc-4959-8fbb-66a73905e0bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.031362] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Created folder: Project (3402c00284e744118d0798626f51d676) in parent group-v559223. [ 573.031649] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating folder: Instances. Parent ref: group-v559224. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 573.032390] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e4cfcc1-5c53-4a89-b0d7-32865d1e3778 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.042250] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Created folder: Instances in parent group-v559224. [ 573.042507] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 573.042700] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 573.042901] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8d21295-1212-4136-bb8e-0b557c3ea50e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.069932] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 573.069932] env[68233]: value = "task-2781679" [ 573.069932] env[68233]: _type = "Task" [ 573.069932] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.079024] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781679, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.175748] env[68233]: DEBUG nova.compute.manager [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Received event network-vif-plugged-23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 573.176243] env[68233]: DEBUG oslo_concurrency.lockutils [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] Acquiring lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.176530] env[68233]: DEBUG oslo_concurrency.lockutils [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.177418] env[68233]: DEBUG oslo_concurrency.lockutils [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 573.177664] env[68233]: DEBUG nova.compute.manager [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] No waiting events found dispatching network-vif-plugged-23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.177818] env[68233]: WARNING nova.compute.manager [req-b48f876b-e965-4402-965a-8ef316bc2d4b req-5c11e6b7-6c8f-49d3-8cfa-60b90304ff8e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Received unexpected event network-vif-plugged-23205d19-f3cb-4543-8f61-314cc465e55f for instance with vm_state building and task_state spawning. 
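Editor's aside: the oslo_concurrency.lockutils DEBUG records above (and throughout this trace) already carry the lock wait and hold times in their "waited N.NNNs" / "held N.NNNs" fields. A minimal, self-contained sketch follows; it is not part of Nova or oslo.concurrency, and only assumes the message format shown in these lines. It aggregates those figures per lock name so slow spots such as "compute_resources" stand out.

import re
import sys
from collections import defaultdict

# Illustrative log-analysis helper (not Nova code): sum the wait/hold times
# that oslo_concurrency.lockutils prints in the DEBUG records above.
ACQUIRED = re.compile(r'Lock "([^"]+)" acquired by "[^"]+" :: waited ([\d.]+)s')
RELEASED = re.compile(r'Lock "([^"]+)" "released" by "[^"]+" :: held ([\d.]+)s')

def lock_stats(lines):
    stats = defaultdict(lambda: {"count": 0, "waited": 0.0, "held": 0.0})
    for line in lines:
        # A wrapped line may hold several records, so match all occurrences.
        for name, waited in ACQUIRED.findall(line):
            stats[name]["count"] += 1
            stats[name]["waited"] += float(waited)
        for name, held in RELEASED.findall(line):
            stats[name]["held"] += float(held)
    return stats

if __name__ == "__main__":
    # Usage: python lock_stats.py < nova-compute.log
    for name, s in sorted(lock_stats(sys.stdin).items()):
        print(f'{name}: acquired {s["count"]}x, '
              f'waited {s["waited"]:.3f}s, held {s["held"]:.3f}s')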
[ 573.251088] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "34889575-95ea-451c-aa59-49a5f30d4e4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.251873] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.340073] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.340364] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.342326] env[68233]: INFO nova.compute.claims [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 573.398677] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 573.463207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.470083] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.582199] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781679, 'name': CreateVM_Task, 'duration_secs': 0.361553} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.585021] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 573.594497] env[68233]: DEBUG oslo_vmware.service [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0121f775-87ae-449a-b190-c76fc49291f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.601779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.601779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 573.602371] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 573.602609] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f04de042-ba25-40c2-a70c-680c033a4e70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.606901] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 
tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 573.606901] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f2fb21-9cd6-ce4d-dbc8-d4b66e701bfe" [ 573.606901] env[68233]: _type = "Task" [ 573.606901] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.615313] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f2fb21-9cd6-ce4d-dbc8-d4b66e701bfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.673578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.673578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.755165] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 573.922359] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.969558] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 574.121184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.121184] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 574.121184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 574.121184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 574.121580] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.121810] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13252d98-93c0-4b89-9d55-3dfa865f6d7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.131970] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.132238] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 574.133058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a699ab-5106-486f-99c7-0d19a8537ff4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.141594] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99c6ff8f-9834-40d2-97ef-f994231e040c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.150679] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 574.150679] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a2c3f-e3b4-278f-c759-ec10410f7c64" [ 574.150679] env[68233]: _type = "Task" [ 574.150679] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.161150] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a2c3f-e3b4-278f-c759-ec10410f7c64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.177567] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 574.282511] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 574.498645] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6708869-2dbd-43b1-a8f8-3937faa1a14a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.506765] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781e2587-d557-4273-b106-050b877088e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.511913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 574.546137] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e77475f-a340-43b3-83d5-6e4ad5fc8e1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.558067] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50209395-c387-4ebf-937a-447b43e42ba5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.572252] env[68233]: DEBUG nova.compute.provider_tree [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.663308] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 574.663759] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating directory with path [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 574.664142] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bea080a5-5dda-46cf-bc13-68fdb8e063a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.690318] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 
tempest-ServerDiagnosticsTest-725888869-project-member] Created directory with path [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 574.690613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Fetch image to [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 574.690839] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Downloading image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk on the data store datastore2 {{(pid=68233) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 574.694949] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7576c46b-ec85-4eab-afe9-d724df5ea849 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.706660] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6a1df9-1f95-4685-87f8-afcabb4be384 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.710163] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 574.718121] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dfa090-30b9-41b0-bf4e-603fdb3be00b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.760107] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c76af1-caad-40f3-8eb2-43143f26ce65 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.766413] env[68233]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b987db4-aa57-43a4-b2cc-2e4f2816dd33 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.797237] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Downloading image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to the data store datastore2 {{(pid=68233) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 574.878791] env[68233]: DEBUG oslo_vmware.rw_handles [None 
req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 575.078039] env[68233]: DEBUG nova.scheduler.client.report [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 575.558373] env[68233]: DEBUG oslo_vmware.rw_handles [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Completed reading data from the image iterator. {{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 575.560530] env[68233]: DEBUG oslo_vmware.rw_handles [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 575.583882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.584504] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 575.587174] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.665s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.589194] env[68233]: INFO nova.compute.claims [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 575.691092] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Downloaded image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk on the data store datastore2 {{(pid=68233) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 575.694099] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 575.694366] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Copying Virtual Disk [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk to [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 575.694657] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4da61128-1bda-4d7a-acc8-b09ee95f136d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.703677] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 575.703677] env[68233]: value = "task-2781680" [ 575.703677] env[68233]: _type = "Task" [ 575.703677] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.712733] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781680, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.003129] env[68233]: DEBUG nova.compute.manager [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Received event network-changed-23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 576.003129] env[68233]: DEBUG nova.compute.manager [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Refreshing instance network info cache due to event network-changed-23205d19-f3cb-4543-8f61-314cc465e55f. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 576.003129] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] Acquiring lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.003129] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] Acquired lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.003129] env[68233]: DEBUG nova.network.neutron [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Refreshing network info cache for port 23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 576.096781] env[68233]: DEBUG nova.compute.utils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 576.098142] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 576.216493] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781680, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.600869] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Start building block device mappings for instance. 
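Editor's aside: the oslo_vmware.api records in this trace follow a fixed pattern while a vCenter task runs: repeated "Task: {'id': ..., 'name': ...} progress is N%." polls, then a final "..., 'duration_secs': N} completed successfully." record (CreateVM_Task, CopyVirtualDisk_Task, and so on). The sketch below is an illustrative helper, not oslo.vmware code, and assumes only that message format; it counts the polls per task and reports the logged duration.

import re
import sys
from collections import Counter

# Illustrative log-analysis helper (not oslo.vmware code): summarize task
# polling and completion records as emitted by oslo_vmware.api above.
POLL = re.compile(r"Task: \{'id': ([^,]+), 'name': (\w+)\} progress is \d+%")
DONE = re.compile(r"Task: \{'id': ([^,]+), 'name': (\w+), "
                  r"'duration_secs': ([\d.]+)\} completed successfully")

def task_report(lines):
    polls = Counter()
    durations = {}
    for line in lines:
        for task_id, name in POLL.findall(line):
            polls[(task_id, name)] += 1
        for task_id, name, secs in DONE.findall(line):
            durations[(task_id, name)] = float(secs)
    return polls, durations

if __name__ == "__main__":
    # Usage: python task_report.py < nova-compute.log
    polls, durations = task_report(sys.stdin)
    for (task_id, name), secs in durations.items():
        print(f"{task_id} ({name}): {secs:.3f}s, polled {polls[(task_id, name)]} time(s)")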
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 576.719651] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781680, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.761262} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.721061] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Copied Virtual Disk [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk to [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 576.721427] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleting the datastore file [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 576.725518] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fa81d33-9141-42f6-b18a-be7cc7041768 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.738913] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 576.738913] env[68233]: value = "task-2781681" [ 576.738913] env[68233]: _type = "Task" [ 576.738913] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.746637] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781681, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.773634] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd866be0-7534-4198-8a21-6e8abbded662 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.779576] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6950795d-dbd5-41d0-b248-ad31090a0b4a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.825323] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf3a096-8ba0-41ac-8741-ef06e22ce990 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.833136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fb272f-e04a-44ca-8dc4-393f56c08c93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.848858] env[68233]: DEBUG nova.compute.provider_tree [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 576.990425] env[68233]: DEBUG nova.network.neutron [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Updated VIF entry in instance network info cache for port 23205d19-f3cb-4543-8f61-314cc465e55f. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 576.990768] env[68233]: DEBUG nova.network.neutron [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Updating instance_info_cache with network_info: [{"id": "23205d19-f3cb-4543-8f61-314cc465e55f", "address": "fa:16:3e:18:58:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23205d19-f3", "ovs_interfaceid": "23205d19-f3cb-4543-8f61-314cc465e55f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.253737] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781681, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023006} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.253737] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 577.253737] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Moving file from [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85/da133fda-e1e2-42a1-a7e0-b8b1426a8490 to [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490. {{(pid=68233) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 577.253737] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-80fe8546-bd5d-4432-b51c-ab49f60581cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.264116] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 577.264116] env[68233]: value = "task-2781682" [ 577.264116] env[68233]: _type = "Task" [ 577.264116] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.272200] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781682, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.354244] env[68233]: DEBUG nova.scheduler.client.report [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 577.496570] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e075a32-9683-47c6-8a2f-621614b1808c req-cf749b84-e1ba-4fac-9d9f-a0ab538c469e service nova] Releasing lock "refresh_cache-eb5dc742-fa8f-4bac-89cb-afa57b5abe12" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 577.613843] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Start spawning the instance on the hypervisor. 
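Editor's aside: the "Updating instance_info_cache with network_info: [...]" records above embed the full VIF model for the instance. The blob prints as JSON in this log, so the port ID, MAC address, and fixed IPs can be pulled back out of the trace. The sketch below is an illustrative helper, not part of Nova; it assumes the blob is valid JSON exactly as shown and ignores the trailing source-location text on each record.

import json
import re
import sys

# Illustrative log-analysis helper (not Nova code): extract port ID, MAC and
# fixed IPs from the network_info cache-update records shown above.
MARKER = re.compile(r"Updating instance_info_cache with network_info: ")

def vifs_from_log(lines):
    decoder = json.JSONDecoder()
    for line in lines:
        m = MARKER.search(line)
        if not m:
            continue
        start = line.find("[", m.end())
        if start == -1:
            continue
        # raw_decode stops at the end of the JSON array, so the trailing
        # "{{(pid=...)}}" locator on the record is ignored.
        vifs, _ = decoder.raw_decode(line[start:])
        for vif in vifs:
            ips = [ip["address"]
                   for subnet in vif["network"]["subnets"]
                   for ip in subnet["ips"]]
            yield vif["id"], vif["address"], ips

if __name__ == "__main__":
    # Usage: python vifs_from_log.py < nova-compute.log
    for port_id, mac, ips in vifs_from_log(sys.stdin):
        print(f"port {port_id} mac {mac} ips {', '.join(ips)}")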
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 577.648216] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 577.648462] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.648641] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 577.648836] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.648975] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 577.649341] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 577.649646] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 577.649843] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 577.650079] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d 
tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 577.650285] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 577.650512] env[68233]: DEBUG nova.virt.hardware [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 577.651440] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a4a99e-4871-41a6-8a63-cd11560db49e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.661256] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26a3b37-e08f-4c39-9a63-b5e13d21d96c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.677929] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 577.683936] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Creating folder: Project (103e7144782b4c419912542a50eaaa82). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 577.684277] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d08bc3b2-a5cf-4976-9af5-db9306d96695 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.695457] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Created folder: Project (103e7144782b4c419912542a50eaaa82) in parent group-v559223. [ 577.695679] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Creating folder: Instances. Parent ref: group-v559227. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 577.695920] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d336ba-500b-4a87-ba11-57faf07ba6a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.707019] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Created folder: Instances in parent group-v559227. [ 577.707019] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 577.707019] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 577.707019] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c5c90cd-8a32-46d6-a069-e0e8e61a56cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.724115] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 577.724115] env[68233]: value = "task-2781685" [ 577.724115] env[68233]: _type = "Task" [ 577.724115] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.731855] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781685, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.775302] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781682, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024053} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 577.775818] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] File moved {{(pid=68233) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 577.776030] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Cleaning up location [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 577.776196] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleting the datastore file [datastore2] vmware_temp/08505c18-db81-4890-a005-dd219f058a85 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 577.776510] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cc57df5-a55b-40b6-908b-68e5e4d0a65f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.783265] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 577.783265] env[68233]: value = "task-2781686" [ 577.783265] env[68233]: _type = "Task" [ 577.783265] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.792765] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781686, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.857270] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 577.857270] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 577.859876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.578s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.861390] env[68233]: INFO nova.compute.claims [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.236062] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781685, 'name': CreateVM_Task, 'duration_secs': 0.30091} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.236062] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 578.236538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.236732] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.237119] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 578.237433] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4078ea7e-baaf-4a42-9259-2f83f46eba8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.243026] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 578.243026] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ef632-8c2a-75fc-d87c-c1f9a28abb57" [ 578.243026] env[68233]: _type = "Task" [ 578.243026] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.254701] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ef632-8c2a-75fc-d87c-c1f9a28abb57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.296933] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026185} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.297342] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 578.299597] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d269e6e-ef50-4652-8dcf-e0a2fbc9ca68 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.305795] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 578.305795] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52748912-cc6a-02ef-153b-209253a208ed" [ 578.305795] env[68233]: _type = "Task" [ 578.305795] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.320896] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52748912-cc6a-02ef-153b-209253a208ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.366255] env[68233]: DEBUG nova.compute.utils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 578.370927] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Not allocating networking since 'none' was specified. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 578.754569] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ef632-8c2a-75fc-d87c-c1f9a28abb57, 'name': SearchDatastore_Task, 'duration_secs': 0.009609} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.755234] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.755593] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.755906] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.817894] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52748912-cc6a-02ef-153b-209253a208ed, 'name': SearchDatastore_Task, 'duration_secs': 0.011894} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.817894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.818198] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] eb5dc742-fa8f-4bac-89cb-afa57b5abe12/eb5dc742-fa8f-4bac-89cb-afa57b5abe12.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 578.818452] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.818815] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 578.818882] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f36c4af-1ba8-4a93-af69-ca5754a8b5fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.822085] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ccae0aa-4649-43a8-9025-5dc9cb810683 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.829263] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 578.829263] env[68233]: value = "task-2781687" [ 578.829263] env[68233]: _type = "Task" [ 578.829263] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.844189] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781687, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.856626] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 578.856912] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 578.857671] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c82baaeb-83be-4715-a0a5-ccc50076b13a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.864368] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 578.864368] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dab4-6967-6fcf-476c-c3ad86e0baf2" [ 578.864368] env[68233]: _type = "Task" [ 578.864368] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.872976] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 578.878109] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dab4-6967-6fcf-476c-c3ad86e0baf2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.035963] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d788a8-e677-4e4c-81e1-5625e7bd5a15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.050653] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b6eb59-641f-415e-b937-ccfb53f4b3bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.090964] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe718be4-ad84-4a31-920f-af726dc4de01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.099494] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3812e2f0-80e5-46cd-b04b-895ceab4e1e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.117789] env[68233]: DEBUG nova.compute.provider_tree [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.125996] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.125996] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.125996] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.126518] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.126583] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.126878] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.127888] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.127888] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 579.128131] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 579.339102] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781687, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.383799] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dab4-6967-6fcf-476c-c3ad86e0baf2, 'name': SearchDatastore_Task, 'duration_secs': 0.021648} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.387459] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dcc6063-c918-481e-8737-5365b590f482 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.396347] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 579.396347] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5225b18c-0811-e0a9-9e16-a1b44c9e0ffb" [ 579.396347] env[68233]: _type = "Task" [ 579.396347] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.406601] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5225b18c-0811-e0a9-9e16-a1b44c9e0ffb, 'name': SearchDatastore_Task, 'duration_secs': 0.007451} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.406776] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.406872] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 579.407470] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-316d65ed-ddba-4039-b154-4ca84bb009e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.413721] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 579.413721] env[68233]: value = "task-2781688" [ 579.413721] env[68233]: _type = "Task" [ 579.413721] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.425759] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781688, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.625467] env[68233]: DEBUG nova.scheduler.client.report [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 579.635683] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.841103] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781687, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553889} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.841372] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] eb5dc742-fa8f-4bac-89cb-afa57b5abe12/eb5dc742-fa8f-4bac-89cb-afa57b5abe12.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.841542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 579.841934] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c7eedf1-8135-4239-8e0c-fb47658ba0be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.857025] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 579.857025] env[68233]: value = "task-2781689" [ 579.857025] env[68233]: _type = "Task" [ 579.857025] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.869524] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.886009] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 579.924328] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 579.925127] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 579.925127] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 579.925127] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 579.925127] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 579.926674] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 579.926674] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 579.926674] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
579.926674] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 579.926674] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 579.926915] env[68233]: DEBUG nova.virt.hardware [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 579.926999] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1e94b3-d47a-449c-b444-6f466516674d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.938883] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781688, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.939220] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7946233c-2363-48fa-8d6b-f0d0d8732d0f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.955711] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.963475] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Creating folder: Project (f9fcd492a632402f8cef62092307d39e). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.963837] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6dad7f39-90cb-41b3-918b-1d3401ff40ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.974820] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Created folder: Project (f9fcd492a632402f8cef62092307d39e) in parent group-v559223. [ 579.975110] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Creating folder: Instances. Parent ref: group-v559230. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 579.975288] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60cfad60-4e46-40f6-a79e-a7edf9912f09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.985354] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Created folder: Instances in parent group-v559230. [ 579.985607] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 579.985831] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.986396] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95c03973-a57a-4ff6-b9c1-c5da029e9e68 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.006965] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 580.006965] env[68233]: value = "task-2781692" [ 580.006965] env[68233]: _type = "Task" [ 580.006965] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.015749] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781692, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.136839] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.277s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 580.138466] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 580.139559] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.628s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.141006] env[68233]: INFO nova.compute.claims [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 580.370385] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132136} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.370468] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 580.371394] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db5d5d3-d337-4a6f-8ac9-389751bb2c2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.396757] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Reconfiguring VM instance instance-00000001 to attach disk [datastore2] eb5dc742-fa8f-4bac-89cb-afa57b5abe12/eb5dc742-fa8f-4bac-89cb-afa57b5abe12.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.397115] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a267cce1-7774-4ecf-aef6-04b3627e0279 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.419096] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 580.419096] env[68233]: value = "task-2781693" [ 580.419096] env[68233]: _type = "Task" [ 580.419096] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.425874] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558781} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.426624] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 580.426755] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 580.427092] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9406434-dab3-47f8-a988-badf91e4efde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.432476] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781693, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.437842] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 580.437842] env[68233]: value = "task-2781694" [ 580.437842] env[68233]: _type = "Task" [ 580.437842] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.448651] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781694, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.516443] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781692, 'name': CreateVM_Task, 'duration_secs': 0.438527} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.516658] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.517227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.517227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.517446] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 580.517689] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca665a07-7877-4da1-b19e-2ecaf6df2bcd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.522784] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 580.522784] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddb0f1-6876-d7e2-f09e-8f8c9dfe1d08" [ 580.522784] env[68233]: _type = "Task" [ 580.522784] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.530906] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddb0f1-6876-d7e2-f09e-8f8c9dfe1d08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.647757] env[68233]: DEBUG nova.compute.utils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 580.651125] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 580.651481] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 580.889347] env[68233]: DEBUG nova.policy [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66620aa49f4a4ef0a03b6936159957ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '66d6553299bf4ee2845b37cac3469206', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 580.930425] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781693, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.948084] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.32831} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.948535] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 580.949548] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80675d53-2050-44ac-983e-d9d9b7ac197a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.978994] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 580.978994] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca5c48a1-0218-42ea-b18b-d28f93e1c750 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.000077] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 581.000077] env[68233]: value = "task-2781695" [ 581.000077] env[68233]: _type = "Task" [ 581.000077] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.008916] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781695, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.033248] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddb0f1-6876-d7e2-f09e-8f8c9dfe1d08, 'name': SearchDatastore_Task, 'duration_secs': 0.045179} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.033557] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.034182] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.034182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.034182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.034373] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 581.034654] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84b40a2b-4127-47b0-8319-da4d8c614d92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.047359] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 581.047522] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 581.049689] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d184e6c-da3f-40a3-824c-ee18540ea63c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.058128] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 581.058128] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527b5d77-40de-9d33-38b6-a66bf4ed08db" [ 581.058128] env[68233]: _type = "Task" [ 581.058128] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.064784] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527b5d77-40de-9d33-38b6-a66bf4ed08db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.155628] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 581.303164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19f0d4cc-b00b-42fb-a5e4-8c1b7bff7d03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.313538] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7062c0d3-afd9-4d96-aa24-97f1b9cf5e09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.350769] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b0dc99-79fa-44e1-8225-cd2d9df3b0c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.360429] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51d3cbd7-2eda-4b1b-80e2-0d2145f457d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.378540] env[68233]: DEBUG nova.compute.provider_tree [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.434978] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781693, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.514746] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.571050] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527b5d77-40de-9d33-38b6-a66bf4ed08db, 'name': SearchDatastore_Task, 'duration_secs': 0.046569} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.571768] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e31d6a02-20a5-4d5c-b726-e78702a6a14a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.578099] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 581.578099] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea42f1-e9b2-0dcb-fb51-65f93cb3448f" [ 581.578099] env[68233]: _type = "Task" [ 581.578099] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.588039] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea42f1-e9b2-0dcb-fb51-65f93cb3448f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.882578] env[68233]: DEBUG nova.scheduler.client.report [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 581.935225] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781693, 'name': ReconfigVM_Task, 'duration_secs': 1.101684} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.935543] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Reconfigured VM instance instance-00000001 to attach disk [datastore2] eb5dc742-fa8f-4bac-89cb-afa57b5abe12/eb5dc742-fa8f-4bac-89cb-afa57b5abe12.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 581.936386] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-57a0afd4-1cf8-4f03-b803-3763e4d81d16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.944925] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 581.944925] env[68233]: value = "task-2781696" [ 581.944925] env[68233]: _type = "Task" [ 581.944925] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.954220] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781696, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.959490] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Successfully created port: d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.012468] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781695, 'name': ReconfigVM_Task, 'duration_secs': 0.741359} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.012795] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfigured VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 582.014270] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a48960c-9021-46dc-b9fd-c212243bd90e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.022744] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 582.022744] env[68233]: value = "task-2781697" [ 582.022744] env[68233]: _type = "Task" [ 582.022744] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.031790] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781697, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.089523] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea42f1-e9b2-0dcb-fb51-65f93cb3448f, 'name': SearchDatastore_Task, 'duration_secs': 0.023699} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.089788] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 582.090141] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6105602a-b8eb-4128-a492-b60a9468018f/6105602a-b8eb-4128-a492-b60a9468018f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.090430] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bdf7ff56-d9de-40a5-89b2-8e27076c62e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.097875] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 582.097875] env[68233]: value = "task-2781698" [ 582.097875] env[68233]: _type = "Task" [ 582.097875] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.106623] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.174949] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 582.210497] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 582.210497] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.210497] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 582.210794] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.210844] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 582.211259] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 582.214359] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 582.215018] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 582.215018] env[68233]: DEBUG nova.virt.hardware [None 
req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 582.215018] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 582.215162] env[68233]: DEBUG nova.virt.hardware [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 582.216081] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72c00a8-0c1a-457f-8a9d-ced371e03759 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.228166] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36cd16c-a154-43f1-9e67-b9de0a499c76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.389911] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 582.390769] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 582.394143] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.684s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.397715] env[68233]: INFO nova.compute.claims [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.459110] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781696, 'name': Rename_Task, 'duration_secs': 0.13561} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.459110] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 582.459443] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af446df3-9217-4c51-93c4-73f7dfa4422c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.467380] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 582.467380] env[68233]: value = "task-2781699" [ 582.467380] env[68233]: _type = "Task" [ 582.467380] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.478819] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781699, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.537437] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781697, 'name': Rename_Task, 'duration_secs': 0.143312} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.537437] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 582.538439] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e859fa2-0211-4257-a73d-a9d2c452435b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.546841] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 582.546841] env[68233]: value = "task-2781700" [ 582.546841] env[68233]: _type = "Task" [ 582.546841] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.556783] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781700, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.609027] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781698, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.896088] env[68233]: DEBUG nova.compute.utils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 582.897695] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 582.897695] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 582.979232] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781699, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.061159] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781700, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.081113] env[68233]: DEBUG nova.policy [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3468a1b9d4d4803b2a0aa2d5f14d2d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b8fc190f2d84e2baab337b6b03d5eac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 583.116338] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.524116} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.116932] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6105602a-b8eb-4128-a492-b60a9468018f/6105602a-b8eb-4128-a492-b60a9468018f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 583.117606] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 583.118165] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac5a4f25-8bf3-440e-856f-ddb6ce8b7ef9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.133646] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 583.133646] env[68233]: value = "task-2781701" [ 583.133646] env[68233]: _type = "Task" [ 583.133646] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.152074] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.285055] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "68a4e635-381d-4dc2-879c-5581cd5e189a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.285434] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.404608] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 583.489878] env[68233]: DEBUG oslo_vmware.api [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781699, 'name': PowerOnVM_Task, 'duration_secs': 0.711148} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.492983] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 583.493476] env[68233]: INFO nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Took 17.13 seconds to spawn the instance on the hypervisor. [ 583.493789] env[68233]: DEBUG nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 583.499807] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b799088-2301-43ca-83a7-0e586ac7cf9f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.561269] env[68233]: DEBUG oslo_vmware.api [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781700, 'name': PowerOnVM_Task, 'duration_secs': 0.623125} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.561753] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 583.561956] env[68233]: INFO nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Took 5.95 seconds to spawn the instance on the hypervisor. 
[ 583.562144] env[68233]: DEBUG nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 583.563301] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b33da9-fca8-4a8d-ad74-8b8b5475ea91 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.590103] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce4026a1-c3e7-441b-bf21-43dea0185c12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.598830] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f36af4-27f8-49f6-90c3-26e045971d8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.631803] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e639627-c4be-4160-9b65-cd8cc5733f2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.646842] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f9dabf-98a5-4d67-9bfc-caffe040ebf8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.652893] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075066} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.653059] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 583.654145] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f343d65-18ea-4b0f-a036-f6cad5f6f8f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.664759] env[68233]: DEBUG nova.compute.provider_tree [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 583.684432] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Reconfiguring VM instance instance-00000003 to attach disk [datastore2] 6105602a-b8eb-4128-a492-b60a9468018f/6105602a-b8eb-4128-a492-b60a9468018f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 583.684432] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58613032-049a-4b2b-ac73-915fdbd88600 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.704517] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 583.704517] env[68233]: value = "task-2781702" [ 583.704517] env[68233]: _type = "Task" [ 583.704517] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.713699] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781702, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.787834] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 584.034367] env[68233]: INFO nova.compute.manager [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Took 21.86 seconds to build instance. 
[ 584.087268] env[68233]: INFO nova.compute.manager [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Took 10.77 seconds to build instance. [ 584.168865] env[68233]: DEBUG nova.scheduler.client.report [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 584.218819] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781702, 'name': ReconfigVM_Task, 'duration_secs': 0.458664} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.218819] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Reconfigured VM instance instance-00000003 to attach disk [datastore2] 6105602a-b8eb-4128-a492-b60a9468018f/6105602a-b8eb-4128-a492-b60a9468018f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 584.218819] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aca02a86-320a-4298-bcdb-75a6f31869fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.223420] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 584.223420] env[68233]: value = "task-2781703" [ 584.223420] env[68233]: _type = "Task" [ 584.223420] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.233744] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781703, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.329470] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.417035] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 584.446270] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 584.446270] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 584.446270] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 584.446413] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 584.446413] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 584.446855] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 584.446855] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 584.447019] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 584.453363] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 584.453363] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 584.453363] env[68233]: DEBUG nova.virt.hardware [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 584.453363] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956e95f1-a52a-4d7c-afd1-a170b6495d35 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.462821] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c34492-4d5a-446d-8d2d-f9f9b2641261 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.538150] env[68233]: DEBUG oslo_concurrency.lockutils [None req-40204fc9-e085-4d08-98e8-62478cf6cb70 tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.370s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.591855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b281c1c6-b9c7-42f4-be43-498b69bdd82d tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.290s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.675217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 
tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.675613] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 584.680794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.045s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.681143] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 584.681345] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 584.681638] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.352s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 584.683968] env[68233]: INFO nova.compute.claims [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 584.687146] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798520d2-0cf4-41de-9733-3982055ee0fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.697078] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0195a7c2-4b76-46d7-addb-f2555ffaed84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.716022] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Successfully created port: 1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 584.717992] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ac2ef0-ddcd-4c8d-8292-ed3177c9e52e {{(pid=68233) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.730252] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3df7e4-8ece-448f-a94b-6744bb66d3a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.738808] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781703, 'name': Rename_Task, 'duration_secs': 0.182218} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.766815] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 584.767379] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181105MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 584.767525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 584.769305] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f4e6ae86-59fc-4f05-8d37-0eff34d76226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.777847] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 584.777847] env[68233]: value = "task-2781704" [ 584.777847] env[68233]: _type = "Task" [ 584.777847] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.785839] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781704, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.183225] env[68233]: DEBUG nova.compute.utils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 585.185118] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 585.185118] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 585.257465] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Successfully updated port: d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 585.291646] env[68233]: DEBUG oslo_vmware.api [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781704, 'name': PowerOnVM_Task, 'duration_secs': 0.473514} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.291902] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 585.292126] env[68233]: INFO nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Took 5.41 seconds to spawn the instance on the hypervisor. [ 585.292294] env[68233]: DEBUG nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 585.293105] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5725ced3-f7fe-49df-815d-3dfdea90e388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.373678] env[68233]: DEBUG nova.policy [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '673cdb014f4949baa648cb8d661293eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d6e2fbf0f9c4fb0bf99e71506798d7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 585.689859] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 585.766126] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.766292] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquired lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.766444] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 585.791169] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.791920] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.814601] env[68233]: INFO nova.compute.manager [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Took 11.91 seconds to build instance. 
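The repeated "Acquiring lock ... / Lock ... acquired by ... :: waited N.NNNs / Lock ... released by ... :: held N.NNNs" lines around ResourceTracker.instance_claim and _locked_do_build_and_run_instance are emitted by the oslo.concurrency lockutils wrapper (the `inner` frames in lockutils.py above), which serializes concurrent builds on the same compute host. The following is a minimal sketch of that pattern using only the public lockutils API; `claim_resources` and the lock-name constant are illustrative stand-ins, not Nova's ResourceTracker code.

```python
# Illustrative sketch (not Nova source): the decorator / context-manager
# pattern that produces the "Acquiring lock ... / acquired ... waited N.NNNs /
# released ... held N.NNNs" DEBUG lines in the log above.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # lock name seen in the log


@lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
def claim_resources(instance):
    """Hypothetical stand-in for ResourceTracker.instance_claim."""
    # Runs with the in-process "compute_resources" lock held, so parallel
    # tempest builds on this host serialize here; the lockutils wrapper logs
    # how long each caller waited for and then held the lock.
    return instance


if __name__ == "__main__":
    claim_resources("fake-instance")
    # The same lock can also be taken explicitly as a context manager
    # (not nested inside the call above -- the default in-process lock is
    # not reentrant within a thread):
    with lockutils.lock(COMPUTE_RESOURCE_SEMAPHORE):
        pass  # critical section; its duration appears as ":: held N.NNNs"
```

The waited/held durations in the excerpt (for example "held 13.426s" for the whole _locked_do_build_and_run_instance section on instance 6105602a-b8eb-4128-a492-b60a9468018f) come from this wrapper timing each acquisition and release.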
[ 585.873040] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc81a58-3885-49bc-af87-ec8b8f6d6122 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.882158] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1594d65b-a5b7-45f5-a0de-3af018dd8c38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.927635] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3473e8a-ed1b-445f-a3df-d13749d4aea3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.938125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad4cb64-acc4-4c56-a77a-d650a309e367 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.954926] env[68233]: DEBUG nova.compute.provider_tree [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 586.299122] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.320432] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf184116-cde0-4e4e-bd98-7c3b5fcb8a6b tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.426s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.461142] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 586.462072] env[68233]: DEBUG nova.scheduler.client.report [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 586.707101] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 586.742674] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 586.743586] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 586.743586] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 586.743586] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 586.743586] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 586.744279] env[68233]: DEBUG nova.virt.hardware [None 
req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 586.744279] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 586.744444] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 586.744478] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 586.744618] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 586.744792] env[68233]: DEBUG nova.virt.hardware [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 586.745782] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02280631-820f-486f-bbe0-8e94b8bc75a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.755962] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e0a41f-87db-4d65-841a-4da3cbff1bfc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.826201] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.977021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.977021] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 
tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 586.978891] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.211s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.394878] env[68233]: DEBUG nova.network.neutron [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Updating instance_info_cache with network_info: [{"id": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "address": "fa:16:3e:5b:73:f8", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2aed54a-2c", "ovs_interfaceid": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.476129] env[68233]: DEBUG nova.compute.manager [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Received event network-vif-plugged-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 587.476512] env[68233]: DEBUG oslo_concurrency.lockutils [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] Acquiring lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.476933] env[68233]: DEBUG oslo_concurrency.lockutils [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.477158] env[68233]: DEBUG oslo_concurrency.lockutils [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.477372] env[68233]: DEBUG nova.compute.manager [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] No waiting events found dispatching network-vif-plugged-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 587.477563] env[68233]: WARNING nova.compute.manager [req-c1c51c5a-d659-4f07-9f7d-d0056d04c9d9 req-a03acb1a-89a0-4529-b9e1-9d63b78133dd service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Received unexpected event network-vif-plugged-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 for instance with vm_state building and task_state spawning. [ 587.481944] env[68233]: DEBUG nova.compute.utils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 587.490244] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 587.630619] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Successfully created port: 749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 587.890378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "38c86c2b-9b2b-482e-b26d-066208467202" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.890621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.900157] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Releasing lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.900157] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 
34889575-95ea-451c-aa59-49a5f30d4e4c] Instance network_info: |[{"id": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "address": "fa:16:3e:5b:73:f8", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2aed54a-2c", "ovs_interfaceid": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 587.900331] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:73:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.909258] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Creating folder: Project (66d6553299bf4ee2845b37cac3469206). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.910902] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66408105-a262-4d92-b0a6-8d34345de277 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.923533] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Created folder: Project (66d6553299bf4ee2845b37cac3469206) in parent group-v559223. [ 587.923690] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Creating folder: Instances. Parent ref: group-v559233. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.923965] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44c33ba4-5899-425e-93fa-99da8243bd4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.936292] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Created folder: Instances in parent group-v559233. [ 587.936292] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 587.936292] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.936292] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-049ef772-5921-415c-8e9d-fb1db1779c07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.957104] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.957104] env[68233]: value = "task-2781707" [ 587.957104] env[68233]: _type = "Task" [ 587.957104] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.965647] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781707, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.991188] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 588.028032] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance eb5dc742-fa8f-4bac-89cb-afa57b5abe12 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.028834] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.028834] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 6105602a-b8eb-4128-a492-b60a9468018f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.028834] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 34889575-95ea-451c-aa59-49a5f30d4e4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.029255] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 102187bd-0cb2-4496-8dd0-9101b24ee4fa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.029545] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.030614] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 68a4e635-381d-4dc2-879c-5581cd5e189a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 588.059526] env[68233]: INFO nova.compute.manager [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Rebuilding instance [ 588.102275] env[68233]: DEBUG nova.compute.manager [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 588.103160] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81db745b-6cbd-43b4-919f-e8f93bcc7ca0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.304446] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Successfully updated port: 1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 588.393476] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 588.467859] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781707, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.533850] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 588.807956] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.809088] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.809088] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 588.924370] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.973330] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781707, 'name': CreateVM_Task, 'duration_secs': 0.555511} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.973555] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 588.974305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.974504] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.975010] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 588.975301] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22606198-87cb-4a15-be7d-c84e1663d768 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.982318] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 588.982318] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c49784-df6a-517e-8491-a231c887f319" [ 588.982318] env[68233]: _type = "Task" [ 588.982318] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.990987] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c49784-df6a-517e-8491-a231c887f319, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.007496] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 589.048324] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 38c86c2b-9b2b-482e-b26d-066208467202 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 589.048776] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 589.048845] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 589.122615] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 589.125671] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c49b7666-a5e7-4194-8820-e26654d97788 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.132255] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 589.132255] env[68233]: value = "task-2781708" [ 589.132255] env[68233]: _type = "Task" [ 589.132255] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.142340] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781708, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.173556] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 589.173556] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 589.173556] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 589.173819] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 589.173819] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 589.173887] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 589.174550] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 589.174550] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 589.174550] env[68233]: DEBUG 
nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 589.174550] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 589.174802] env[68233]: DEBUG nova.virt.hardware [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 589.176114] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42edbd79-562e-4c2a-958d-63e8b4f71ee3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.188467] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9acc6c-75cf-4913-bdca-d80c071e9b5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.205143] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 589.211248] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Creating folder: Project (2cd856f3acdf4bf49df6a39cef3c23e6). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.214429] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3d329ba-474c-4c7a-b9de-cb10ea849595 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.224032] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Created folder: Project (2cd856f3acdf4bf49df6a39cef3c23e6) in parent group-v559223. [ 589.224241] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Creating folder: Instances. Parent ref: group-v559236. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 589.224478] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84ad45e7-7c9e-4817-874f-b2db56c9fa09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.235126] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Created folder: Instances in parent group-v559236. [ 589.235126] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 589.235126] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 589.235126] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf11b001-55a8-47b1-8a1c-c280e152803c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.253239] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac857f0-f1da-4dcf-9c46-f94062109b3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.260040] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 589.260040] env[68233]: value = "task-2781711" [ 589.260040] env[68233]: _type = "Task" [ 589.260040] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.264608] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77727d95-6232-4d20-b9f4-a64dee7381f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.271135] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781711, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.302698] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b605afa5-d85b-47c8-9b8e-6dabae14d8fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.310569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fae93b2-82a3-435f-ad4b-0d5584f757c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.330039] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 589.496466] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c49784-df6a-517e-8491-a231c887f319, 'name': SearchDatastore_Task, 'duration_secs': 0.020752} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.496789] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.496971] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.497272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.497420] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.497704] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.497850] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0825d848-c9b0-4f26-b997-ac654e7d6780 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.508844] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.509112] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 589.509910] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0b63cc6-bec0-4c51-b425-d5712e0ef65b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.516847] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 589.516847] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6df07-15d1-7e40-84dc-df23a97f4b88" [ 589.516847] env[68233]: _type = "Task" [ 589.516847] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.526633] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6df07-15d1-7e40-84dc-df23a97f4b88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.566033] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 589.646880] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781708, 'name': PowerOffVM_Task, 'duration_secs': 0.219147} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.647271] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 589.647381] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 589.648192] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4862ce23-7b1b-4569-b83d-0cc9acd30ecc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.655131] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 589.655483] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92359692-998b-49b0-933f-427725e18075 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.678899] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 589.679058] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 589.679242] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Deleting the datastore file [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 589.679494] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8bb6ab2-611a-4fea-a4c7-57798373cd4a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.688063] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 589.688063] env[68233]: value = "task-2781713" [ 589.688063] env[68233]: _type = "Task" [ 589.688063] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.696774] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781713, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.769265] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781711, 'name': CreateVM_Task, 'duration_secs': 0.332374} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.769265] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 589.769519] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.769654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.770655] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 589.770655] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9df1561f-a7f1-40a5-8600-8d08aa2ebf53 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.775517] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 589.775517] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6d4-a939-27c6-99ca-41f6ac789c56" [ 589.775517] env[68233]: _type = "Task" [ 589.775517] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.785726] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6d4-a939-27c6-99ca-41f6ac789c56, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.833350] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.028130] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6df07-15d1-7e40-84dc-df23a97f4b88, 'name': SearchDatastore_Task, 'duration_secs': 0.021146} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.029909] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b19e302a-d1ca-4768-afe5-3639367060ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.035257] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 590.035257] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52624ee7-4b6c-0ef3-36eb-de78553b174b" [ 590.035257] env[68233]: _type = "Task" [ 590.035257] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.043202] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52624ee7-4b6c-0ef3-36eb-de78553b174b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.050114] env[68233]: DEBUG nova.network.neutron [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Updating instance_info_cache with network_info: [{"id": "1628cd6e-9d87-4435-9443-623c66f0590e", "address": "fa:16:3e:e7:1e:4a", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1628cd6e-9d", "ovs_interfaceid": "1628cd6e-9d87-4435-9443-623c66f0590e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.203459] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781713, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.241193} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.204388] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 590.204388] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 590.204388] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 590.286857] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6d4-a939-27c6-99ca-41f6ac789c56, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.287236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.287580] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 590.288652] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.338755] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 590.339184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.360s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.339270] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.513s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.341014] env[68233]: INFO nova.compute.claims [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 590.553106] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52624ee7-4b6c-0ef3-36eb-de78553b174b, 'name': SearchDatastore_Task, 'duration_secs': 0.018451} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.553443] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.553629] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 34889575-95ea-451c-aa59-49a5f30d4e4c/34889575-95ea-451c-aa59-49a5f30d4e4c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 590.554126] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.554392] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Instance network_info: |[{"id": "1628cd6e-9d87-4435-9443-623c66f0590e", "address": "fa:16:3e:e7:1e:4a", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1628cd6e-9d", "ovs_interfaceid": "1628cd6e-9d87-4435-9443-623c66f0590e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 590.554624] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 590.554802] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 
tempest-ServersListShow298Test-1354555361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 590.556494] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5cd65945-939e-4b29-9f9d-f1af92ab5288 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.558906] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:1e:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1628cd6e-9d87-4435-9443-623c66f0590e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 590.574147] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Creating folder: Project (1b8fc190f2d84e2baab337b6b03d5eac). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.574147] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad5dcdcc-d9f9-493d-868d-c0300ffcb30f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.576027] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-830f61ed-c0a8-40d3-9513-2883a7cfb263 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.585077] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 590.585077] env[68233]: value = "task-2781714" [ 590.585077] env[68233]: _type = "Task" [ 590.585077] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.593152] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 590.593370] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 590.595136] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98ac30a6-bc6d-45df-a8b5-4e98f314d505 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.603233] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.605539] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 590.605539] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d2adbc-06a8-0441-5ddb-60eb2b7b0a62" [ 590.605539] env[68233]: _type = "Task" [ 590.605539] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.606840] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Created folder: Project (1b8fc190f2d84e2baab337b6b03d5eac) in parent group-v559223. [ 590.606840] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Creating folder: Instances. Parent ref: group-v559239. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 590.606840] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21cbab3c-ed7b-4b81-9da4-f267453f9f30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.619382] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d2adbc-06a8-0441-5ddb-60eb2b7b0a62, 'name': SearchDatastore_Task, 'duration_secs': 0.008304} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.623052] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Created folder: Instances in parent group-v559239. [ 590.623052] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 590.623052] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-730f23e9-b024-4dda-b7bc-e6222d699d09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.625282] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 590.626067] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb965148-b922-4dba-b79d-f9c915aa2e63 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.645164] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 590.645164] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dd4c09-cbd2-b2bb-d284-2e889724f4a9" [ 590.645164] env[68233]: _type = "Task" [ 590.645164] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.649679] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 590.649679] env[68233]: value = "task-2781717" [ 590.649679] env[68233]: _type = "Task" [ 590.649679] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.657050] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dd4c09-cbd2-b2bb-d284-2e889724f4a9, 'name': SearchDatastore_Task, 'duration_secs': 0.008209} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.658113] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.658388] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 590.658878] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f72ca29d-7b8a-49fb-a087-bb2bdcf81055 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.667337] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781717, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.673565] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 590.673565] env[68233]: value = "task-2781718" [ 590.673565] env[68233]: _type = "Task" [ 590.673565] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.685042] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781718, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.107946] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781714, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.147432] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.148128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.168660] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781717, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.189186] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781718, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.281294] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 591.281713] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.281713] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 591.281914] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Flavor pref 
0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.282015] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 591.282242] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 591.282568] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 591.282568] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 591.282701] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 591.282858] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 591.283035] env[68233]: DEBUG nova.virt.hardware [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 591.283979] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ff5704-781d-4970-8224-e98a47321fd8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.298215] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7f54cd-20e2-44e7-a756-486f1f5584bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.321055] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 591.327081] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 
tempest-ServersAdmin275Test-1718965882-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 591.327081] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 591.327081] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-842ea3f4-e947-4211-a147-f5a1c57dfc39 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.349022] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 591.349022] env[68233]: value = "task-2781719" [ 591.349022] env[68233]: _type = "Task" [ 591.349022] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.363516] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781719, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.454036] env[68233]: DEBUG nova.compute.manager [None req-3597acc0-f09c-47c2-831d-de9a7e89b743 tempest-ServerDiagnosticsTest-1889749463 tempest-ServerDiagnosticsTest-1889749463-project-admin] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 591.455709] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc43f899-ab32-4e93-816f-fa682b591c26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.473086] env[68233]: INFO nova.compute.manager [None req-3597acc0-f09c-47c2-831d-de9a7e89b743 tempest-ServerDiagnosticsTest-1889749463 tempest-ServerDiagnosticsTest-1889749463-project-admin] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Retrieving diagnostics [ 591.473086] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbee17fe-7ad7-4763-95ae-1e93c05cdcba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.605557] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544886} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.610369] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 34889575-95ea-451c-aa59-49a5f30d4e4c/34889575-95ea-451c-aa59-49a5f30d4e4c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.610369] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.610369] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-98307594-5b3e-47d2-ae69-11a362af54da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.618318] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 591.618318] env[68233]: value = "task-2781720" [ 591.618318] env[68233]: _type = "Task" [ 591.618318] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.634678] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781720, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.642458] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04e0a1a-a6c5-417b-a281-ddf75313e9b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.649770] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a7263e-e493-4fd4-b381-8c4531ce498a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.655333] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 591.701029] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448a2123-2d29-4655-b253-dd4a7f74ef64 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.701029] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781717, 'name': CreateVM_Task, 'duration_secs': 0.650185} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.701029] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.702281] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.702567] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.702769] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 591.703051] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d37dddcf-3a01-4567-a0b4-1eaa671dcaee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.710143] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.869683} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.714314] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d090515b-c284-48a0-9905-6d2a26e8d5ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.721097] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.721410] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.723383] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a4d3722-e7f3-4d59-b675-fa7ca4ab609d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.726169] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 591.726169] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ee2ae9-bad7-ce82-744d-c054b3ca40e0" [ 591.726169] env[68233]: _type = "Task" [ 591.726169] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.740539] env[68233]: DEBUG nova.compute.provider_tree [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 591.742985] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 591.742985] env[68233]: value = "task-2781721" [ 591.742985] env[68233]: _type = "Task" [ 591.742985] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.751866] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ee2ae9-bad7-ce82-744d-c054b3ca40e0, 'name': SearchDatastore_Task, 'duration_secs': 0.018289} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.752682] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 591.752927] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 591.753193] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.755654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.755654] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 591.759099] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2667fd7a-f927-4c4a-a926-10397cd8b65d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.761607] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.770817] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 591.770817] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 591.771669] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d6935e1-bdbb-4c30-9879-09f7ae2bff9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.777585] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 591.777585] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52509d08-f4db-2d10-f52a-a613a0858846" [ 591.777585] env[68233]: _type = "Task" [ 591.777585] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.789293] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52509d08-f4db-2d10-f52a-a613a0858846, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.858433] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781719, 'name': CreateVM_Task, 'duration_secs': 0.418314} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.858724] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 591.859250] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 591.859250] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 591.859791] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 591.860064] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60d4c258-719f-48a9-879c-181460b37f11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.864620] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 591.864620] env[68233]: 
value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a8efab-7c40-0109-c4c0-14317d466b98" [ 591.864620] env[68233]: _type = "Task" [ 591.864620] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.874343] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a8efab-7c40-0109-c4c0-14317d466b98, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.887103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "ebc4a17b-54fa-49eb-b899-6e339e56f27b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.887103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "ebc4a17b-54fa-49eb-b899-6e339e56f27b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.080514] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Successfully updated port: 749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 592.131863] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781720, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18154} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.132030] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.135296] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ff6a19-2d8c-4db3-a6b6-2f8c148db174 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.157775] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Reconfiguring VM instance instance-00000004 to attach disk [datastore2] 34889575-95ea-451c-aa59-49a5f30d4e4c/34889575-95ea-451c-aa59-49a5f30d4e4c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.158078] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe124157-e7fe-4726-99e3-d543b30c3f7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.181168] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 592.181168] env[68233]: value = "task-2781722" [ 592.181168] env[68233]: _type = "Task" [ 592.181168] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.191998] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781722, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.198855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.244386] env[68233]: DEBUG nova.scheduler.client.report [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 592.259221] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076004} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.259382] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 592.260154] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-552d0816-222f-4eca-a69e-cf0588cfa07a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.281528] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 592.282746] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08cc1189-0d5f-4329-a240-ce6c56f934be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.315065] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 592.315065] env[68233]: value = "task-2781723" [ 592.315065] env[68233]: _type = "Task" [ 592.315065] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.315561] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52509d08-f4db-2d10-f52a-a613a0858846, 'name': SearchDatastore_Task, 'duration_secs': 0.020978} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.320895] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b08ee73-51a0-499b-94ab-b65d478848fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.327568] env[68233]: DEBUG nova.compute.manager [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Received event network-vif-plugged-1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 592.327834] env[68233]: DEBUG oslo_concurrency.lockutils [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] Acquiring lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.328092] env[68233]: DEBUG oslo_concurrency.lockutils [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.328271] env[68233]: DEBUG oslo_concurrency.lockutils [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.328430] env[68233]: DEBUG nova.compute.manager [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] No waiting events found dispatching network-vif-plugged-1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 592.328583] env[68233]: WARNING nova.compute.manager [req-1da9e4a1-9294-4481-b70b-6daa3c5db5b5 req-e27b1dc3-b15c-41ca-bc00-256d033d68a4 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Received unexpected event network-vif-plugged-1628cd6e-9d87-4435-9443-623c66f0590e for instance with vm_state building and task_state spawning. [ 592.338399] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781723, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.338720] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 592.338720] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddcf20-e3a1-0e17-d3ce-298548b59d16" [ 592.338720] env[68233]: _type = "Task" [ 592.338720] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.351815] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddcf20-e3a1-0e17-d3ce-298548b59d16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.382172] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a8efab-7c40-0109-c4c0-14317d466b98, 'name': SearchDatastore_Task, 'duration_secs': 0.028094} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.382172] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.382172] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 592.382172] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.391480] env[68233]: DEBUG nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.585239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 592.586731] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.586731] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 592.694651] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.754645] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 592.755227] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 592.758448] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.834s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.760232] env[68233]: INFO nova.compute.claims [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 592.827695] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781723, 'name': ReconfigVM_Task, 'duration_secs': 0.350645} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.828046] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.828661] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5df63fd8-5990-41b7-819c-5ff02817eccf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.835935] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 592.835935] env[68233]: value = "task-2781724" [ 592.835935] env[68233]: _type = "Task" [ 592.835935] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.848071] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781724, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.851406] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ddcf20-e3a1-0e17-d3ce-298548b59d16, 'name': SearchDatastore_Task, 'duration_secs': 0.026144} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.851406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 592.851406] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 102187bd-0cb2-4496-8dd0-9101b24ee4fa/102187bd-0cb2-4496-8dd0-9101b24ee4fa.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 592.851605] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 592.851605] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 592.851802] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e55e81d3-1213-4c47-8b14-1cbd8d4b0f6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.854035] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4460ed9-76cc-4437-82fe-86c63095e4bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.861264] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 592.861264] env[68233]: value = "task-2781725" [ 592.861264] env[68233]: _type = "Task" [ 592.861264] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.866195] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 592.866416] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 592.867596] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f900f17-ae1e-41fa-8c5b-c2c3e4aa85d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.873527] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.876612] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 592.876612] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52834483-2a0f-8bcc-f0dc-15153b5c2f11" [ 592.876612] env[68233]: _type = "Task" [ 592.876612] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.884895] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52834483-2a0f-8bcc-f0dc-15153b5c2f11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.928251] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.176852] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 593.195243] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781722, 'name': ReconfigVM_Task, 'duration_secs': 0.995806} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.195574] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Reconfigured VM instance instance-00000004 to attach disk [datastore2] 34889575-95ea-451c-aa59-49a5f30d4e4c/34889575-95ea-451c-aa59-49a5f30d4e4c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 593.196383] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28e08d0d-c932-443d-91a0-42b6cfa77155 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.204186] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 593.204186] env[68233]: value = "task-2781726" [ 593.204186] env[68233]: _type = "Task" [ 593.204186] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.226766] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781726, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.264782] env[68233]: DEBUG nova.compute.utils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 593.272361] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 593.273127] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 593.347343] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781724, 'name': Rename_Task, 'duration_secs': 0.163577} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.347800] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.348099] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d125df7a-eeb6-4a8a-889d-232cfad68028 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.354431] env[68233]: DEBUG nova.policy [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9eb18c1b665848c2a0df8ca715417ab7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1939e7276d1c4f23bf462114f85e72a8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 593.359225] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 593.359225] env[68233]: value = "task-2781727" [ 593.359225] env[68233]: _type = "Task" [ 593.359225] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.371878] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781727, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.374917] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781725, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.391230] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52834483-2a0f-8bcc-f0dc-15153b5c2f11, 'name': SearchDatastore_Task, 'duration_secs': 0.012793} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.392234] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe6af89-6991-4daf-bcd4-0b3b57e178b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.401578] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 593.401578] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283c876-8aa5-35bb-bee8-84d4c6d8e810" [ 593.401578] env[68233]: _type = "Task" [ 593.401578] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.413761] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283c876-8aa5-35bb-bee8-84d4c6d8e810, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.622139] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.622191] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.643247] env[68233]: DEBUG nova.compute.manager [None req-b3769953-34cc-4bd8-8dc4-dcaa4384642b tempest-ServerDiagnosticsV248Test-1749919594 tempest-ServerDiagnosticsV248Test-1749919594-project-admin] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 593.645452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66dd3736-66b1-41c9-8463-fd5121563876 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.653430] env[68233]: INFO nova.compute.manager [None req-b3769953-34cc-4bd8-8dc4-dcaa4384642b tempest-ServerDiagnosticsV248Test-1749919594 tempest-ServerDiagnosticsV248Test-1749919594-project-admin] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Retrieving diagnostics [ 593.654582] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8576357-f0b3-4c7f-abb0-c424fda653b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.715199] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b 
tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781726, 'name': Rename_Task, 'duration_secs': 0.462426} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.715541] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.715541] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0f0909-50df-490d-88fd-23c7fe090016 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.722505] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 593.722505] env[68233]: value = "task-2781728" [ 593.722505] env[68233]: _type = "Task" [ 593.722505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.730232] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.781101] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 593.792204] env[68233]: DEBUG nova.compute.manager [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Received event network-changed-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 593.792204] env[68233]: DEBUG nova.compute.manager [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Refreshing instance network info cache due to event network-changed-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 593.792204] env[68233]: DEBUG oslo_concurrency.lockutils [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] Acquiring lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.792204] env[68233]: DEBUG oslo_concurrency.lockutils [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] Acquired lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.792204] env[68233]: DEBUG nova.network.neutron [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Refreshing network info cache for port d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 593.873726] env[68233]: DEBUG oslo_vmware.api [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781727, 'name': PowerOnVM_Task, 'duration_secs': 0.500034} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.877044] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 593.877344] env[68233]: INFO nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Took 4.87 seconds to spawn the instance on the hypervisor. [ 593.878051] env[68233]: DEBUG nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 593.878051] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67989} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.880996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ab9175-18d7-4131-9b1c-f6849942da44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.883946] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 102187bd-0cb2-4496-8dd0-9101b24ee4fa/102187bd-0cb2-4496-8dd0-9101b24ee4fa.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 593.884437] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 593.884960] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8453358d-2017-4ab2-b91a-b1bf3c02fdcc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.896910] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 593.896910] env[68233]: value = "task-2781729" [ 593.896910] env[68233]: _type = "Task" [ 593.896910] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.911461] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.918164] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283c876-8aa5-35bb-bee8-84d4c6d8e810, 'name': SearchDatastore_Task, 'duration_secs': 0.060395} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.918164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 593.918164] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 593.918164] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-778fa03f-2400-47b6-b270-2bae2eee2ed8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.924977] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 593.924977] env[68233]: value = "task-2781730" [ 593.924977] env[68233]: _type = "Task" [ 593.924977] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.938655] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781730, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.024086] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f084506c-a302-485d-8f22-a070de38a0b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.032724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b03f5c0-5e5e-45fc-b935-738850177467 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.066013] env[68233]: DEBUG nova.network.neutron [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.071717] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f50ce262-cc7e-4bf4-9205-1f17e186f8a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.077239] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29502df6-46f5-462b-80cc-8feb763d50f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.091974] env[68233]: DEBUG nova.compute.provider_tree [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.125244] env[68233]: DEBUG nova.compute.manager [None 
req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.234486] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781728, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.416295] env[68233]: INFO nova.compute.manager [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Took 10.12 seconds to build instance. [ 594.428029] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072781} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.428029] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 594.434747] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f66e0c7-8e66-4fa1-8183-23ddb75dec8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.475484] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 102187bd-0cb2-4496-8dd0-9101b24ee4fa/102187bd-0cb2-4496-8dd0-9101b24ee4fa.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 594.476832] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781730, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.477139] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27c25c5c-f615-40b6-b441-1d59a9993dd6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.495290] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Successfully created port: cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 594.503385] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 594.503385] env[68233]: value = "task-2781731" [ 594.503385] env[68233]: _type = "Task" [ 594.503385] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.515578] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.572756] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 594.572756] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Instance network_info: |[{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 594.575140] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:49:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '749b7b47-864a-4c70-804b-9e57cc1b14a5', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 594.605138] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Creating folder: Project (8d6e2fbf0f9c4fb0bf99e71506798d7c). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.615210] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c077f46-61e3-46ee-ab96-8e9c05d4a106 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.637855] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Created folder: Project (8d6e2fbf0f9c4fb0bf99e71506798d7c) in parent group-v559223. [ 594.638061] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Creating folder: Instances. Parent ref: group-v559243. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.638302] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7784a13-fa7f-44cd-9596-ee656cacf806 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.641138] env[68233]: ERROR nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [req-419c52d2-1c97-4507-b0e1-5df6d3573143] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-419c52d2-1c97-4507-b0e1-5df6d3573143"}]} [ 594.651315] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Created folder: Instances in parent group-v559243. 
[ 594.652228] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 594.652228] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 594.652228] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-580e31f8-f94a-45f2-bd5b-06c3ced0ed4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.672137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.675584] env[68233]: DEBUG nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 594.679580] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.679580] env[68233]: value = "task-2781734" [ 594.679580] env[68233]: _type = "Task" [ 594.679580] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.687883] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781734, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.695449] env[68233]: DEBUG nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 594.696023] env[68233]: DEBUG nova.compute.provider_tree [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 594.722160] env[68233]: DEBUG nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 594.734348] env[68233]: DEBUG oslo_vmware.api [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781728, 'name': PowerOnVM_Task, 'duration_secs': 0.953439} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.734990] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 594.734990] env[68233]: INFO nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Took 12.56 seconds to spawn the instance on the hypervisor. 
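Many of the surrounding records are the oslo.vmware task-polling loop: wait_for_task keeps invoking _poll_task, logging "progress is N%" until the vCenter task (CreateVM_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, …) reaches a terminal state and a "completed successfully" record with duration_secs is emitted. A stripped-down sketch of that pattern, using plain time.sleep and a caller-supplied poll callable instead of oslo.vmware's looping-call machinery and the vSphere API:

```python
# Hypothetical polling helper mirroring the wait_for_task/_poll_task pattern
# seen in the log; the poll function and its return shape are assumptions.
import time


class TaskFailed(Exception):
    pass


def wait_for_task(poll, interval=0.5, timeout=300):
    """Poll `poll()` until it reports success or error.

    `poll` is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'result': ...}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            raise TaskFailed(info.get("result"))
        # Corresponds to the "progress is N%" debug lines emitted each cycle.
        print(f"task progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```

That loop is what shows up above as "Task: {'id': task-2781734, 'name': CreateVM_Task} progress is 0%" and, once finished, as the completion record carrying duration_secs.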
[ 594.735180] env[68233]: DEBUG nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 594.735839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92012b8-e161-4ce7-9b87-4f6d278bb602 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.753937] env[68233]: DEBUG nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 594.797188] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 594.835362] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 594.835562] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 594.835737] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 594.835939] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 594.836095] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 594.836239] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 594.836445] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 594.837449] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 594.837449] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 594.837449] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 594.837449] env[68233]: DEBUG nova.virt.hardware [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 594.838794] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd9b54a-5de6-4013-9f9c-ddd688fc99bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.853021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6685e6-6b64-4f5d-9411-eb913f5c8b01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.927027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36481bd6-6704-4426-8ed3-64c24f9aa764 tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.641s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 594.944088] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781730, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.710858} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.944808] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 594.944949] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 594.948032] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-398f1aa5-896d-4bd2-ba12-b7abc61fa07c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.954110] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 594.954110] env[68233]: value = "task-2781735" [ 594.954110] env[68233]: _type = "Task" [ 594.954110] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.968108] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781735, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.013707] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781731, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.016740] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8141d51-d048-4194-bc66-dbf21053d0b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.025285] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f9f21b-9279-40a5-bc0c-111323097072 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.063264] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2412dd13-4f70-4d99-9feb-20a4bdc2f974 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.076094] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b88741c-9e53-4ef9-93b2-2110538aa0dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.091870] env[68233]: DEBUG nova.compute.provider_tree [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.195916] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781734, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.266637] env[68233]: INFO nova.compute.manager [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Took 21.01 seconds to build instance. [ 595.471340] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781735, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076234} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.471340] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 595.471340] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef0a87f-371d-4442-8dcf-4811407b331c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.493904] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 595.495500] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f4022e4-007d-4c7d-9118-878c8c70c56b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.519155] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 595.519155] env[68233]: value = "task-2781736" [ 595.519155] env[68233]: _type = "Task" [ 595.519155] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.525021] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781731, 'name': ReconfigVM_Task, 'duration_secs': 0.625769} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.526999] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 102187bd-0cb2-4496-8dd0-9101b24ee4fa/102187bd-0cb2-4496-8dd0-9101b24ee4fa.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 595.527805] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fac3d873-8065-4d28-bfbc-52aef03e2abd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.536517] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781736, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.538215] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 595.538215] env[68233]: value = "task-2781737" [ 595.538215] env[68233]: _type = "Task" [ 595.538215] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.546840] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781737, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.598978] env[68233]: DEBUG nova.scheduler.client.report [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.691803] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781734, 'name': CreateVM_Task, 'duration_secs': 0.613106} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.694553] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 595.694553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.694553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 595.694553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 595.694553] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d99498f4-2045-4055-8902-10dfad30d5f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.698365] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 595.698365] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520ed393-bf6a-561a-7d51-da0d61eae5e4" [ 595.698365] env[68233]: _type = "Task" [ 595.698365] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.713318] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520ed393-bf6a-561a-7d51-da0d61eae5e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.771134] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab033d1e-0ff0-4950-96a6-3a79b01b5f6b tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.520s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.780979] env[68233]: DEBUG nova.network.neutron [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Updated VIF entry in instance network info cache for port d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 595.780979] env[68233]: DEBUG nova.network.neutron [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Updating instance_info_cache with network_info: [{"id": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "address": "fa:16:3e:5b:73:f8", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2aed54a-2c", "ovs_interfaceid": "d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.037330] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781736, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.056249] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781737, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.105767] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.347s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.109018] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 596.110548] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.913s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.118331] env[68233]: INFO nova.compute.claims [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.213464] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520ed393-bf6a-561a-7d51-da0d61eae5e4, 'name': SearchDatastore_Task, 'duration_secs': 0.019977} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.213886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.214149] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 596.214426] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.214620] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.214815] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 596.215110] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e3c5db3-976b-4a8c-86a3-e077b39b868d {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.233023] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 596.233023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 596.233023] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c1733eb-bba2-4a3b-9140-faf1f1da69dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.238646] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 596.238646] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524801c8-04ce-9975-0fd7-cac94c62563a" [ 596.238646] env[68233]: _type = "Task" [ 596.238646] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.248980] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524801c8-04ce-9975-0fd7-cac94c62563a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.284743] env[68233]: DEBUG oslo_concurrency.lockutils [req-35edbd61-1c43-41c6-9bea-4b484ce1406a req-b912ded4-d599-4df9-97ab-ded52376838c service nova] Releasing lock "refresh_cache-34889575-95ea-451c-aa59-49a5f30d4e4c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.406950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "87385201-3118-4a8e-9739-db3b431566c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.407624] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.541234] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781736, 'name': ReconfigVM_Task, 'duration_secs': 0.754966} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.544928] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfigured VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 596.545938] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd15ca93-c0af-4329-b8d2-2d41f096d9ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.553463] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781737, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.557679] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 596.557679] env[68233]: value = "task-2781738" [ 596.557679] env[68233]: _type = "Task" [ 596.557679] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.566423] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781738, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.621502] env[68233]: DEBUG nova.compute.utils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.633361] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.633662] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.646843] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.647175] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.753964] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524801c8-04ce-9975-0fd7-cac94c62563a, 'name': SearchDatastore_Task, 'duration_secs': 0.014555} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.753964] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-489b9f22-c2d5-4d42-8b69-3889cab23338 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.762995] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 596.762995] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5272a683-10c1-8965-d878-9086742054a5" [ 596.762995] env[68233]: _type = "Task" [ 596.762995] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.771714] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5272a683-10c1-8965-d878-9086742054a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.789691] env[68233]: DEBUG nova.policy [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '434654c75b9b4ddaaf3714b355c2a5bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd921fe0876de499dbc86529a00b2c6f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.910524] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.991908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.991908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.991908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.991908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.992142] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.994443] env[68233]: INFO nova.compute.manager [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Terminating instance [ 597.022316] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Successfully updated port: cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 597.056505] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781737, 'name': Rename_Task, 'duration_secs': 1.170281} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.061020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 597.062138] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-825d494a-ae61-4057-8813-3b80b458f832 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.067471] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.067723] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 597.073990] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781738, 'name': Rename_Task, 'duration_secs': 0.262156} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.074782] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 597.075059] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52ec6edf-ddc5-4926-a6fc-f1c97ede9ff8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.079055] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 597.079055] env[68233]: value = "task-2781739" [ 597.079055] env[68233]: _type = "Task" [ 597.079055] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.084338] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 597.084338] env[68233]: value = "task-2781740" [ 597.084338] env[68233]: _type = "Task" [ 597.084338] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.092949] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.098191] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781740, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.137237] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 597.150781] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 597.281320] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5272a683-10c1-8965-d878-9086742054a5, 'name': SearchDatastore_Task, 'duration_secs': 0.032254} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.284186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.284527] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 597.285748] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deeb2c23-aa9e-49e2-958c-85fe3d282dfd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.294730] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 597.294730] env[68233]: value = "task-2781741" [ 597.294730] env[68233]: _type = "Task" [ 597.294730] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.300584] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781741, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.450115] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.492586] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4518fc4b-c40e-46d6-93bf-d56875e0548e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.501864] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa35cc8-2115-4c76-bacc-e6bcff15ae5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.510233] env[68233]: DEBUG nova.compute.manager [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 597.510460] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 597.511281] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe90a94-e8bf-4a96-91f0-23bdb3d7f7e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.523249] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 597.563771] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f40a3c15-e27e-4fb8-b80d-6a9f3f363948 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.566232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.566376] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquired lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.566528] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.571154] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0603645-fc3b-42a9-857a-092f4ddabb34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.576344] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 597.586288] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 597.586288] env[68233]: value = "task-2781742" [ 597.586288] env[68233]: _type = "Task" [ 597.586288] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.598452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d2c1577-783d-4bba-a663-56022371cade {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.619783] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781739, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.646858] env[68233]: DEBUG nova.compute.provider_tree [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.651857] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781740, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.670407] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781742, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.694054] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.789309] env[68233]: DEBUG nova.compute.manager [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Received event network-changed-1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 597.789585] env[68233]: DEBUG nova.compute.manager [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Refreshing instance network info cache due to event network-changed-1628cd6e-9d87-4435-9443-623c66f0590e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 597.789723] env[68233]: DEBUG oslo_concurrency.lockutils [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] Acquiring lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.789861] env[68233]: DEBUG oslo_concurrency.lockutils [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] Acquired lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.793287] env[68233]: DEBUG nova.network.neutron [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Refreshing network info cache for port 1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 597.810599] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781741, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.062301] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Successfully created port: 486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.103036] env[68233]: DEBUG oslo_vmware.api [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2781739, 'name': PowerOnVM_Task, 'duration_secs': 0.88035} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.107186] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 598.107186] env[68233]: INFO nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Took 13.69 seconds to spawn the instance on the hypervisor. 
[ 598.107186] env[68233]: DEBUG nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 598.107467] env[68233]: DEBUG oslo_vmware.api [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781740, 'name': PowerOnVM_Task, 'duration_secs': 0.889293} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.109070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.109850] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80e62cb-9d6c-43b4-8fec-445f35f6eeda {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.112837] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 598.113152] env[68233]: DEBUG nova.compute.manager [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 598.117323] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d7ce64a-91d9-45bf-a3ef-30933e626823 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.121219] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781742, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.133840] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 598.153212] env[68233]: DEBUG nova.scheduler.client.report [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 598.158640] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 598.205192] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 598.205450] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.205599] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 598.205839] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.205988] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image pref 0:0:0 
{{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 598.206146] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 598.206353] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 598.206508] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 598.206681] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 598.206871] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 598.207059] env[68233]: DEBUG nova.virt.hardware [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 598.207936] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae1c304-d7c3-4fb2-92ad-b30c1c1a76ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.224052] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b674d422-c050-45d1-a6c2-767cf36ae4db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.306822] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781741, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.677857} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.309661] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 598.309893] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 598.310821] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-571dba19-ac50-46e4-bef4-c3abc49706b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.319618] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 598.319618] env[68233]: value = "task-2781743" [ 598.319618] env[68233]: _type = "Task" [ 598.319618] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.329304] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781743, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.437355] env[68233]: DEBUG nova.network.neutron [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating instance_info_cache with network_info: [{"id": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "address": "fa:16:3e:17:11:83", "network": {"id": "ea80e702-bc90-4c9c-b2a4-255391807764", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1601733849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1939e7276d1c4f23bf462114f85e72a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd34c1dc-df", "ovs_interfaceid": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.607604] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781742, 'name': PowerOffVM_Task, 'duration_secs': 0.540217} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.608531] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 598.609089] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 598.609433] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48c414b0-64bf-40b3-9fcf-96e8a3046363 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.652186] env[68233]: INFO nova.compute.manager [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Took 24.17 seconds to build instance. 
[ 598.657815] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.658562] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.671829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.744s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.673869] env[68233]: INFO nova.compute.claims [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.683561] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.689094] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 598.689322] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 598.689505] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleting the datastore file [datastore2] eb5dc742-fa8f-4bac-89cb-afa57b5abe12 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 598.689757] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89dc1afe-fecf-421f-bd93-e11e402e1919 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.699655] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 
tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for the task: (returnval){ [ 598.699655] env[68233]: value = "task-2781745" [ 598.699655] env[68233]: _type = "Task" [ 598.699655] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.712876] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.830857] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.301537} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.831219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 598.831992] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ab5e22-c1a3-4be1-9d1e-0d3eb58208b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.855020] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 598.857508] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c977655-7df2-4a95-aa2c-1c671a7f9c43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.879388] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 598.879388] env[68233]: value = "task-2781746" [ 598.879388] env[68233]: _type = "Task" [ 598.879388] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.887036] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781746, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.940369] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Releasing lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.942845] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Instance network_info: |[{"id": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "address": "fa:16:3e:17:11:83", "network": {"id": "ea80e702-bc90-4c9c-b2a4-255391807764", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1601733849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1939e7276d1c4f23bf462114f85e72a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd34c1dc-df", "ovs_interfaceid": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 598.943046] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:11:83', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f972c061-0cd5-4aed-8cfb-42cc4a08835a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd34c1dc-df6e-4115-b9e9-55df77ee36c9', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.951561] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Creating folder: Project (1939e7276d1c4f23bf462114f85e72a8). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.952342] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8872c515-0f6d-4463-8d81-aff51f95fdd8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.964118] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Created folder: Project (1939e7276d1c4f23bf462114f85e72a8) in parent group-v559223. [ 598.964568] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Creating folder: Instances. Parent ref: group-v559246. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.964951] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6414ea7e-d702-4213-82b8-904f12f06daf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.977025] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Created folder: Instances in parent group-v559246. [ 598.977025] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 598.977025] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 598.977025] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de78b2f3-2254-4655-8d6f-5f07c4abbbda {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.000284] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.000284] env[68233]: value = "task-2781749" [ 599.000284] env[68233]: _type = "Task" [ 599.000284] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.009092] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781749, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.155602] env[68233]: DEBUG oslo_concurrency.lockutils [None req-763ffc94-c7c5-4f2f-b0e6-6bba652faf6f tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.692s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.168479] env[68233]: DEBUG nova.compute.utils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.173350] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.173350] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.212257] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781745, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.260174] env[68233]: DEBUG nova.policy [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d1fb4f1c8be43c287e3410a8921836d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4520e2c17033482598edf2bbde01450c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.295559] env[68233]: DEBUG nova.network.neutron [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Updated VIF entry in instance network info cache for port 1628cd6e-9d87-4435-9443-623c66f0590e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 599.295559] env[68233]: DEBUG nova.network.neutron [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Updating instance_info_cache with network_info: [{"id": "1628cd6e-9d87-4435-9443-623c66f0590e", "address": "fa:16:3e:e7:1e:4a", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.217", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1628cd6e-9d", "ovs_interfaceid": "1628cd6e-9d87-4435-9443-623c66f0590e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 599.388264] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.516373] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781749, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.676899] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.714354] env[68233]: DEBUG oslo_vmware.api [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Task: {'id': task-2781745, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.609148} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.714659] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 599.714946] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 599.715110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 599.715328] env[68233]: INFO nova.compute.manager [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Took 2.20 seconds to destroy the instance on the hypervisor. [ 599.715584] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 599.716202] env[68233]: DEBUG nova.compute.manager [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 599.716202] env[68233]: DEBUG nova.network.neutron [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 599.798544] env[68233]: DEBUG oslo_concurrency.lockutils [req-b173e7f0-be90-41e0-a742-676fb057c55b req-22f45131-cbec-48d3-b78b-37ef79e797f1 service nova] Releasing lock "refresh_cache-102187bd-0cb2-4496-8dd0-9101b24ee4fa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 599.836676] env[68233]: DEBUG nova.compute.manager [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Received event network-vif-plugged-749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 599.838089] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.838089] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.838089] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.838089] env[68233]: DEBUG nova.compute.manager [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] No waiting events found dispatching network-vif-plugged-749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 599.838089] env[68233]: WARNING nova.compute.manager [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Received unexpected event network-vif-plugged-749b7b47-864a-4c70-804b-9e57cc1b14a5 for instance with vm_state building and task_state spawning. 
[ 599.838411] env[68233]: DEBUG nova.compute.manager [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Received event network-changed-749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 599.838411] env[68233]: DEBUG nova.compute.manager [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Refreshing instance network info cache due to event network-changed-749b7b47-864a-4c70-804b-9e57cc1b14a5. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 599.838411] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Acquiring lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.838411] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Acquired lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.838411] env[68233]: DEBUG nova.network.neutron [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Refreshing network info cache for port 749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.893214] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781746, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.992603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ca755b-31e3-43f5-8ce0-f90557a043ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.001773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bbe5b3-8684-47c0-8153-bb95f58a1e19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.043494] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781749, 'name': CreateVM_Task, 'duration_secs': 0.64557} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.043907] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 600.044817] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6187ccc-56a5-4c9f-a1fe-a3cd79bc6b6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.048078] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.048245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.048759] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 600.048855] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49c3711a-7ec5-43ce-846b-3b07ba8b490c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.057630] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48315e1-9e37-4b64-b413-681068a74770 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.061755] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 600.061755] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5230c859-22da-ac94-8927-9267e6c8469d" [ 600.061755] env[68233]: _type = "Task" [ 600.061755] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.073637] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 600.080576] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5230c859-22da-ac94-8927-9267e6c8469d, 'name': SearchDatastore_Task, 'duration_secs': 0.010932} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.080951] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.081899] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.081899] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.081899] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.081899] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 600.082099] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-a4ef4c46-797b-4497-8810-51374d1e7e54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.090974] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 600.091178] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 600.091913] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26117f7b-b541-4018-959c-5a327a69e7ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.100916] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 600.100916] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d845f5-8c05-50a4-e27f-7c94a9b7ccea" [ 600.100916] env[68233]: _type = "Task" [ 600.100916] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.109731] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d845f5-8c05-50a4-e27f-7c94a9b7ccea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.220155] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Successfully created port: db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.396913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.397375] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.405154] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781746, 'name': ReconfigVM_Task, 'duration_secs': 1.238033} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.405554] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 600.408365] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db2aa87e-f27d-4151-8eb7-d40333628fd2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.413833] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 600.413833] env[68233]: value = "task-2781750" [ 600.413833] env[68233]: _type = "Task" [ 600.413833] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.423627] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781750, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.611194] env[68233]: ERROR nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [req-1f4c320f-41c2-4a71-b971-ad4392213201] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1f4c320f-41c2-4a71-b971-ad4392213201"}]} [ 600.621037] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d845f5-8c05-50a4-e27f-7c94a9b7ccea, 'name': SearchDatastore_Task, 'duration_secs': 0.01108} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.621929] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b902e653-f11a-44d8-8ac4-cc4e7ad2d011 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.628619] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 600.628619] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263d9bb-d884-76a0-94aa-b5133a3a04d8" [ 600.628619] env[68233]: _type = "Task" [ 600.628619] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.633962] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 600.645095] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263d9bb-d884-76a0-94aa-b5133a3a04d8, 'name': SearchDatastore_Task, 'duration_secs': 0.011755} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.645095] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.645692] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6/9eeb90c6-6ac2-43cb-887a-b69a28dc43a6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 600.645692] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e6ea7bb-94c7-4c34-b798-12c6eac183ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.653618] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 600.653618] env[68233]: value = "task-2781751" [ 600.653618] env[68233]: _type = "Task" [ 600.653618] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.659091] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 600.659091] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 600.669129] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 
tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781751, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.679379] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 600.689463] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.722267] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 600.734300] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:49:33Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1011053998',id=25,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1623685536',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.734564] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.734729] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.734932] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 
tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.736627] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.737213] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.737591] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.737794] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.738035] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.738241] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.738536] env[68233]: DEBUG nova.virt.hardware [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.740233] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7003ac-8e38-483f-955e-9d8efb5c4915 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.749273] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ee0df3-1adb-4141-80e1-0161e7d47ce8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.901117] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Starting 
instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 600.945179] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781750, 'name': Rename_Task, 'duration_secs': 0.175386} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.945604] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 600.945885] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0030fa9-45af-4cb4-bcc0-7d411619c122 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.957828] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 600.957828] env[68233]: value = "task-2781752" [ 600.957828] env[68233]: _type = "Task" [ 600.957828] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.975395] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781752, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.038318] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84626d9b-0b2a-4a71-8796-6c8bca26c51f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.048556] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50c431b-9043-4db8-8d3e-4940978a431f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.090025] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb56aee-c3ec-4cab-a1c0-c11d47cff5ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.100922] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f564dee-e892-4305-a507-688ca89ca53c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.106696] env[68233]: DEBUG nova.network.neutron [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updated VIF entry in instance network info cache for port 749b7b47-864a-4c70-804b-9e57cc1b14a5. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 601.106696] env[68233]: DEBUG nova.network.neutron [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.127483] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.166986] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781751, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.289104] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Successfully updated port: 486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 601.341600] env[68233]: DEBUG nova.network.neutron [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.441580] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.474243] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781752, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.612751] env[68233]: DEBUG oslo_concurrency.lockutils [req-902f3dac-2858-4bb9-a074-d110af909a40 req-d3052f9a-a4a0-400a-8e33-6819b42b7877 service nova] Releasing lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.664263] env[68233]: ERROR nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [req-8da0e2b2-15a0-42a5-9a55-5226160524ca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8da0e2b2-15a0-42a5-9a55-5226160524ca"}]} [ 601.691254] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781751, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57176} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.691617] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6/9eeb90c6-6ac2-43cb-887a-b69a28dc43a6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 601.691847] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 601.692110] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0973c89-df85-49e9-b201-a4f7dc249bb5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.699689] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 601.699689] env[68233]: value = "task-2781753" [ 601.699689] env[68233]: _type = "Task" [ 601.699689] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.700674] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 601.712253] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781753, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.724852] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 601.724852] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 601.745013] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 601.785463] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 601.795350] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.795463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.795595] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 
tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 601.804725] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "19a1441d-9621-4e6e-ac38-8ad08206facf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.805825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.805825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.805825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.847854] env[68233]: INFO nova.compute.manager [-] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Took 2.13 seconds to deallocate network for instance. [ 601.976495] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781752, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.128661] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3e562d-f962-4d8f-bc1b-b3b44f4804d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.137421] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-907a65d7-5a28-4e00-aae9-56b4b2a2afc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.174411] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef575208-5c02-43a0-9510-a3a3b9a3bb0a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.182423] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d685321b-100f-4bf4-8d71-4ae6b36918e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.200893] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 602.211646] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.333973} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.212439] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.213193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e36decc-ffbc-4445-986a-731349906964 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.243471] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6/9eeb90c6-6ac2-43cb-887a-b69a28dc43a6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.243471] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c073f329-ca04-491d-a49f-9abcf891c682 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.262663] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 602.262663] env[68233]: value = "task-2781754" [ 602.262663] env[68233]: _type = "Task" [ 602.262663] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.270812] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781754, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.357469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.363149] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 602.475957] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781752, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.700835] env[68233]: DEBUG nova.compute.manager [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Received event network-vif-plugged-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 602.700835] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.702245] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.702550] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.702867] env[68233]: DEBUG nova.compute.manager [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] No waiting events found dispatching network-vif-plugged-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 602.702867] env[68233]: WARNING nova.compute.manager [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Received unexpected event network-vif-plugged-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 for instance with vm_state building and task_state spawning. [ 602.703114] env[68233]: DEBUG nova.compute.manager [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Received event network-changed-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 602.703198] env[68233]: DEBUG nova.compute.manager [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Refreshing instance network info cache due to event network-changed-cd34c1dc-df6e-4115-b9e9-55df77ee36c9. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 602.703393] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Acquiring lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.703528] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Acquired lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.703688] env[68233]: DEBUG nova.network.neutron [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Refreshing network info cache for port cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 602.728156] env[68233]: ERROR nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [req-4323fda8-c983-4d69-98d5-7120416fc3d0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4323fda8-c983-4d69-98d5-7120416fc3d0"}]} [ 602.731851] env[68233]: DEBUG nova.network.neutron [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Updating instance_info_cache with network_info: [{"id": "486238a1-39d6-463c-8bb2-8fd9577c7798", "address": "fa:16:3e:d3:6c:d9", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap486238a1-39", "ovs_interfaceid": "486238a1-39d6-463c-8bb2-8fd9577c7798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.757999] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 602.773901] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 602.774062] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 602.780058] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781754, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.791486] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 602.813300] env[68233]: DEBUG nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 602.985753] env[68233]: DEBUG oslo_vmware.api [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781752, 'name': PowerOnVM_Task, 'duration_secs': 1.699638} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.990318] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.990318] env[68233]: INFO nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Took 16.28 seconds to spawn the instance on the hypervisor. 
[ 602.991545] env[68233]: DEBUG nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 602.993029] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ae8724-d539-48f7-9e59-07b1d52600d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.996044] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Successfully updated port: db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.065923] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Acquiring lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.066178] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.066421] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Acquiring lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.066536] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.066707] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.070271] env[68233]: INFO nova.compute.manager [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 
102187bd-0cb2-4496-8dd0-9101b24ee4fa] Terminating instance [ 603.131284] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.131478] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.193146] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a78f025-3d72-4b73-8f96-14a3f6cafc7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.202302] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fead8619-8c44-4cce-9aa8-5c7316e52d8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.234805] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.235121] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Instance network_info: |[{"id": "486238a1-39d6-463c-8bb2-8fd9577c7798", "address": "fa:16:3e:d3:6c:d9", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap486238a1-39", "ovs_interfaceid": "486238a1-39d6-463c-8bb2-8fd9577c7798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 603.235889] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50448960-be8b-482e-8409-3ec5b03fd15e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.238695] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:6c:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '486238a1-39d6-463c-8bb2-8fd9577c7798', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.245818] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Creating folder: Project (d921fe0876de499dbc86529a00b2c6f3). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 603.246428] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ad1f8da-fa69-4410-a8a9-a242e258e590 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.255125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7338bc85-9b5b-4fe6-b898-e79f59d4d067 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.268597] env[68233]: DEBUG nova.compute.provider_tree [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 603.274076] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Created folder: Project (d921fe0876de499dbc86529a00b2c6f3) in parent group-v559223. [ 603.274278] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Creating folder: Instances. Parent ref: group-v559249. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 603.274743] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1ec81d65-173a-4dfc-aedd-6f878803d2a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.282859] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781754, 'name': ReconfigVM_Task, 'duration_secs': 0.673825} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.283846] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6/9eeb90c6-6ac2-43cb-887a-b69a28dc43a6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 603.287013] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Created folder: Instances in parent group-v559249. [ 603.287013] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 603.287013] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c68f6f2-a984-4323-829a-9e130da3cbf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.287013] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 603.287013] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-711885b1-1f5f-4b29-af40-b186a9055ed6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.308036] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.308036] env[68233]: value = "task-2781758" [ 603.308036] env[68233]: _type = "Task" [ 603.308036] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.309332] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 603.309332] env[68233]: value = "task-2781757" [ 603.309332] env[68233]: _type = "Task" [ 603.309332] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.319672] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781758, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.322996] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781757, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.477281] env[68233]: DEBUG nova.network.neutron [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updated VIF entry in instance network info cache for port cd34c1dc-df6e-4115-b9e9-55df77ee36c9. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.477626] env[68233]: DEBUG nova.network.neutron [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating instance_info_cache with network_info: [{"id": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "address": "fa:16:3e:17:11:83", "network": {"id": "ea80e702-bc90-4c9c-b2a4-255391807764", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1601733849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1939e7276d1c4f23bf462114f85e72a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd34c1dc-df", "ovs_interfaceid": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.499960] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.500152] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.500303] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 
tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.514032] env[68233]: INFO nova.compute.manager [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Took 28.82 seconds to build instance. [ 603.578798] env[68233]: DEBUG nova.compute.manager [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 603.578936] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 603.581351] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc888f9a-a0f5-4714-bc17-5a954c687472 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.591323] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 603.591604] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac9df067-4e2f-4d82-95f7-dc05462085c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.598033] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Waiting for the task: (returnval){ [ 603.598033] env[68233]: value = "task-2781759" [ 603.598033] env[68233]: _type = "Task" [ 603.598033] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.609113] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Task: {'id': task-2781759, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.795292] env[68233]: ERROR nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [req-2d511433-ca71-4dfd-9570-29e4fa4cacd3] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d511433-ca71-4dfd-9570-29e4fa4cacd3"}]} [ 603.795804] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.124s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 603.799562] env[68233]: ERROR nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Failed to build and run instance: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 51aa13e7-0977-4031-b209-4ae90c83752c (generation 23): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d511433-ca71-4dfd-9570-29e4fa4cacd3"}]} [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Traceback (most recent call last): [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/compute/manager.py", line 2622, in _build_and_run_instance [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] with self.rt.instance_claim(context, instance, node, allocs, [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] return f(*args, **kwargs) [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 215, in instance_claim [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] self._update(elevated, cn) [ 603.799562] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in 
_update [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] self._update_to_placement(context, compute_node, startup) [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] raise attempt.get() [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] six.reraise(self.value[0], self.value[1], self.value[2]) [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] raise value [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 603.799949] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] self.reportclient.update_from_provider_tree( [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] self.set_inventory_for_provider( [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] raise exception.ResourceProviderUpdateConflict( [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 51aa13e7-0977-4031-b209-4ae90c83752c (generation 23): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d511433-ca71-4dfd-9570-29e4fa4cacd3"}]} [ 603.800362] env[68233]: ERROR nova.compute.manager [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] [ 603.801129] env[68233]: DEBUG nova.compute.utils [None 
req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] A conflict was encountered attempting to update resource provider 51aa13e7-0977-4031-b209-4ae90c83752c (generation 23): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource pro {{(pid=68233) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 603.802274] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.130s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.803816] env[68233]: INFO nova.compute.claims [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.807948] env[68233]: DEBUG nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Build of instance ebc4a17b-54fa-49eb-b899-6e339e56f27b was re-scheduled: A conflict was encountered attempting to update resource provider 51aa13e7-0977-4031-b209-4ae90c83752c (generation 23): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2d511433-ca71-4dfd-9570-29e4fa4cacd3"}]} {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 603.808348] env[68233]: DEBUG nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Unplugging VIFs for instance {{(pid=68233) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 603.808942] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "refresh_cache-ebc4a17b-54fa-49eb-b899-6e339e56f27b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.809128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "refresh_cache-ebc4a17b-54fa-49eb-b899-6e339e56f27b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.809298] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 603.824982] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781758, 'name': CreateVM_Task, 'duration_secs': 0.378151} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.827810] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.829558] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781757, 'name': Rename_Task, 'duration_secs': 0.191938} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.829651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.829767] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.830100] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 603.830381] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 603.830595] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-379a1b2f-f35d-4ab1-9a0f-e10c8d90eb15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.832239] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-499c5c50-ee40-484a-8449-2cefdc6d8d5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.836705] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 603.836705] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cd66e9-ccc1-d1b7-2c27-62274e38a934" [ 603.836705] env[68233]: _type = "Task" [ 603.836705] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.841302] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 603.841302] env[68233]: value = "task-2781760" [ 603.841302] env[68233]: _type = "Task" [ 603.841302] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.848104] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cd66e9-ccc1-d1b7-2c27-62274e38a934, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.853107] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781760, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.981692] env[68233]: DEBUG oslo_concurrency.lockutils [req-bef4ba10-07bf-4fb2-9768-369d7a1577a5 req-f47c28f7-7677-404c-b592-562a4b7c03c8 service nova] Releasing lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.017616] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dc3e9781-5758-44a2-b347-6750f3b16e8a tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.344s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.054264] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.111797] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Task: {'id': task-2781759, 'name': PowerOffVM_Task, 'duration_secs': 0.22735} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.111797] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 604.111797] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 604.111797] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58077a9c-7bb2-46a7-ad13-36262f68712b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.180998] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 604.180998] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 604.180998] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Deleting the datastore file [datastore2] 102187bd-0cb2-4496-8dd0-9101b24ee4fa {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 604.180998] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f25823d6-6377-4186-854d-1a00c0742927 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.188578] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Waiting for the task: (returnval){ [ 604.188578] env[68233]: value = "task-2781762" [ 604.188578] env[68233]: _type = "Task" [ 604.188578] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.202398] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Task: {'id': task-2781762, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.245728] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.246055] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.255084] env[68233]: DEBUG nova.network.neutron [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updating instance_info_cache with network_info: [{"id": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "address": "fa:16:3e:d8:f5:e8", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb2a881b-a7", "ovs_interfaceid": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.336244] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.351396] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cd66e9-ccc1-d1b7-2c27-62274e38a934, 'name': SearchDatastore_Task, 'duration_secs': 0.009516} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.355381] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.355381] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.355517] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.355650] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.356308] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.356308] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781760, 'name': PowerOnVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.356308] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f3e0f0b-038a-45e4-988c-f6ae267d7499 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.377530] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.377751] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 604.378801] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58e38fa3-1078-4844-9eff-39856497a181 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.387035] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 604.387035] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9ff6d-b7fa-5e83-79fa-b50955ec9c1c" [ 604.387035] env[68233]: _type = "Task" [ 604.387035] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.395011] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9ff6d-b7fa-5e83-79fa-b50955ec9c1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.452033] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.520302] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 604.531623] env[68233]: DEBUG nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Received event network-vif-deleted-23205d19-f3cb-4543-8f61-314cc465e55f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 604.533024] env[68233]: DEBUG nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Received event network-vif-plugged-486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 604.533024] env[68233]: DEBUG oslo_concurrency.lockutils [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Acquiring lock "38c86c2b-9b2b-482e-b26d-066208467202-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.533225] env[68233]: DEBUG oslo_concurrency.lockutils [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Lock "38c86c2b-9b2b-482e-b26d-066208467202-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.533396] env[68233]: DEBUG oslo_concurrency.lockutils [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Lock "38c86c2b-9b2b-482e-b26d-066208467202-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.533561] env[68233]: DEBUG nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] No waiting events found dispatching network-vif-plugged-486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 604.534158] env[68233]: WARNING nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Received unexpected event network-vif-plugged-486238a1-39d6-463c-8bb2-8fd9577c7798 for instance with vm_state building and task_state spawning. [ 604.534158] env[68233]: DEBUG nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Received event network-changed-486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 604.534158] env[68233]: DEBUG nova.compute.manager [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Refreshing instance network info cache due to event network-changed-486238a1-39d6-463c-8bb2-8fd9577c7798. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 604.534297] env[68233]: DEBUG oslo_concurrency.lockutils [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Acquiring lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.534429] env[68233]: DEBUG oslo_concurrency.lockutils [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Acquired lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.534578] env[68233]: DEBUG nova.network.neutron [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Refreshing network info cache for port 486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 604.698448] env[68233]: DEBUG oslo_vmware.api [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Task: {'id': task-2781762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.333067} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.699948] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 604.699948] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 604.700160] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 604.700340] env[68233]: INFO nova.compute.manager [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Took 1.12 seconds to destroy the instance on the hypervisor. [ 604.700580] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.703678] env[68233]: DEBUG nova.compute.manager [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 604.703678] env[68233]: DEBUG nova.network.neutron [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 604.705644] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.706010] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.763244] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.763244] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Instance network_info: |[{"id": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "address": "fa:16:3e:d8:f5:e8", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb2a881b-a7", "ovs_interfaceid": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 604.763453] env[68233]: DEBUG 
nova.virt.vmwareapi.vmops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:f5:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'db2a881b-a7e3-40d6-9df5-f9280b97cfc9', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.772383] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Creating folder: Project (4520e2c17033482598edf2bbde01450c). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.773881] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fab4f8c-2dc2-43cc-b87e-fe29d2b0c21e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.784362] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Created folder: Project (4520e2c17033482598edf2bbde01450c) in parent group-v559223. [ 604.784547] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Creating folder: Instances. Parent ref: group-v559252. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.784779] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-784bef7c-e8d5-4cfc-9654-7a46fdf81dff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.793921] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Created folder: Instances in parent group-v559252. [ 604.794188] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.794396] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.794602] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13f2da6c-4b99-4de7-9a9c-d16d424be599 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.819096] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.819096] env[68233]: value = "task-2781765" [ 604.819096] env[68233]: _type = "Task" [ 604.819096] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.827169] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781765, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.831959] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 604.847047] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 604.847445] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 604.857588] env[68233]: DEBUG oslo_vmware.api [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2781760, 'name': PowerOnVM_Task, 'duration_secs': 0.669622} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.857778] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 604.858950] env[68233]: INFO nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Took 10.06 seconds to spawn the instance on the hypervisor. [ 604.858950] env[68233]: DEBUG nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 604.859927] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931fba64-54d6-43eb-a1c6-9137b39d885e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.865264] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 604.891480] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 604.902073] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9ff6d-b7fa-5e83-79fa-b50955ec9c1c, 'name': SearchDatastore_Task, 'duration_secs': 0.017784} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.903341] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26fa8735-4a17-438b-a841-ba19d9f75bc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.910283] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 604.910283] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c8f08c-e4a7-36b0-fe03-ee9cd57bf66f" [ 604.910283] env[68233]: _type = "Task" [ 604.910283] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.920370] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c8f08c-e4a7-36b0-fe03-ee9cd57bf66f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.955434] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "refresh_cache-ebc4a17b-54fa-49eb-b899-6e339e56f27b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.955664] env[68233]: DEBUG nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68233) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 604.956299] env[68233]: DEBUG nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 604.956517] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 605.008185] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 605.044077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.259010] env[68233]: INFO nova.compute.manager [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Rebuilding instance [ 605.288209] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4666ac-51cd-4719-8118-4085c02e7fac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.299725] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1924c0b-030e-4c6e-aa16-20fa393bedeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.307316] env[68233]: DEBUG nova.compute.manager [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 605.307316] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ae0377-930e-4bf0-b571-55dc3f21eb4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.340922] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0a5cb63-9640-4b77-8f25-7c844d94c8dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.356094] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781765, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.359320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e534bff9-25f8-4917-bf96-37254c380ea4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.376232] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.387048] env[68233]: INFO nova.compute.manager [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Took 18.58 seconds to build instance. [ 605.424103] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c8f08c-e4a7-36b0-fe03-ee9cd57bf66f, 'name': SearchDatastore_Task, 'duration_secs': 0.011679} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.424376] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.424759] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 38c86c2b-9b2b-482e-b26d-066208467202/38c86c2b-9b2b-482e-b26d-066208467202.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.424961] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c48b927-54be-4b56-80b4-9e36d9aa4fe8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.433488] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 605.433488] env[68233]: value = "task-2781766" [ 605.433488] env[68233]: _type = "Task" [ 605.433488] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.443391] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781766, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.447261] env[68233]: DEBUG nova.network.neutron [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Updated VIF entry in instance network info cache for port 486238a1-39d6-463c-8bb2-8fd9577c7798. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 605.447570] env[68233]: DEBUG nova.network.neutron [req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Updating instance_info_cache with network_info: [{"id": "486238a1-39d6-463c-8bb2-8fd9577c7798", "address": "fa:16:3e:d3:6c:d9", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap486238a1-39", "ovs_interfaceid": "486238a1-39d6-463c-8bb2-8fd9577c7798", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.500724] env[68233]: DEBUG nova.network.neutron [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.512162] env[68233]: DEBUG nova.network.neutron [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.847652] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781765, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.889045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4487aed-9bc8-48ac-a511-e05d706e1d41 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.098s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.902273] env[68233]: ERROR nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [req-44c4281b-e113-47cf-a053-7d4976b5915e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-44c4281b-e113-47cf-a053-7d4976b5915e"}]} [ 605.922938] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 605.941061] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 605.941061] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.950230] env[68233]: DEBUG oslo_concurrency.lockutils 
[req-61215baf-b69e-4cd4-8d8b-03daed313808 req-c7d9dc65-4493-45ef-bf63-b58d09a0161e service nova] Releasing lock "refresh_cache-38c86c2b-9b2b-482e-b26d-066208467202" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.950777] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781766, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.958617] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 605.960934] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 26 to 27 during operation: update_aggregates {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 605.985609] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 606.003562] env[68233]: INFO nova.compute.manager [-] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Took 1.30 seconds to deallocate network for instance. [ 606.018017] env[68233]: INFO nova.compute.manager [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: ebc4a17b-54fa-49eb-b899-6e339e56f27b] Took 1.06 seconds to deallocate network for instance. 
[ 606.221073] env[68233]: DEBUG nova.compute.manager [None req-c946ac87-28aa-4f22-a3ea-d850c734dee4 tempest-ServerDiagnosticsV248Test-1749919594 tempest-ServerDiagnosticsV248Test-1749919594-project-admin] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.224022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb284a7-030a-4425-a34b-5c3df8a896d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.232346] env[68233]: INFO nova.compute.manager [None req-c946ac87-28aa-4f22-a3ea-d850c734dee4 tempest-ServerDiagnosticsV248Test-1749919594 tempest-ServerDiagnosticsV248Test-1749919594-project-admin] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Retrieving diagnostics [ 606.232346] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ba0848-f184-4c91-a866-9fbfd3b4d359 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.317963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.318537] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.348178] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781765, 'name': CreateVM_Task, 'duration_secs': 1.3813} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.348240] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 606.348969] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.349124] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.349437] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 606.349682] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c9aa61-ff0a-4be2-98c1-44e653d3cbee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.355351] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 606.356063] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2fdcfe8-1b13-45da-a722-d387a6e47e38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.359302] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 606.359302] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5248410e-810a-df2f-730c-2fa0493d3800" [ 606.359302] env[68233]: _type = "Task" [ 606.359302] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.364280] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 606.364280] env[68233]: value = "task-2781767" [ 606.364280] env[68233]: _type = "Task" [ 606.364280] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.373409] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5248410e-810a-df2f-730c-2fa0493d3800, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.380224] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.390267] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187c37dd-f76f-433e-a589-ffd5a0422b70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.393655] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 606.404044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "75f58a50-7891-42df-8820-c997300a3159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.404122] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.405132] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea2afc5-f55a-4bff-94d7-3b8a9be6d412 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.447868] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4190969e-8193-48d5-a17e-8c42459d44d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.455839] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781766, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727823} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.458391] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 38c86c2b-9b2b-482e-b26d-066208467202/38c86c2b-9b2b-482e-b26d-066208467202.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.458497] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 606.458786] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fa3dcbf-73d5-4370-8758-f321d2c8a02a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.461485] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6dfd563-35f6-445c-9e99-a96e82085317 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.479166] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 606.479697] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 606.479697] env[68233]: value = "task-2781768" [ 606.479697] env[68233]: _type = "Task" [ 606.479697] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.488285] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781768, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.511821] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.871893] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5248410e-810a-df2f-730c-2fa0493d3800, 'name': SearchDatastore_Task, 'duration_secs': 0.021736} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.872591] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.873163] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.873163] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.873230] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.873402] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 606.873706] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8519206c-fa6e-40be-9705-7c12b4d87a95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.878168] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 
tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781767, 'name': PowerOffVM_Task, 'duration_secs': 0.429076} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.878721] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 606.879036] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.879743] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bfb847-ae8a-47a9-a0c7-5a60ffe347fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.884679] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 606.884929] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 606.887702] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0213c18f-b6c9-4ffa-b868-b78875d046dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.889995] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 606.890182] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-330aa67b-33e2-4b54-827e-96c9d6af4967 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.894271] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 606.894271] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523ab2e7-6451-7e1e-17d7-e773bc8d2c55" [ 606.894271] env[68233]: _type = "Task" [ 606.894271] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.901839] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523ab2e7-6451-7e1e-17d7-e773bc8d2c55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.918377] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 606.918597] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 606.918780] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Deleting the datastore file [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 606.919089] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88c9f01f-0ee7-4810-ba31-59b6df4bfdc5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.924123] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.925134] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 606.925134] env[68233]: value = "task-2781770" [ 606.925134] env[68233]: _type = "Task" [ 606.925134] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.933684] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781770, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.000563] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08877} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.000563] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.001131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d1e171-b4f1-4a9f-8b3d-3c267f175401 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.024494] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 38c86c2b-9b2b-482e-b26d-066208467202/38c86c2b-9b2b-482e-b26d-066208467202.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 607.025747] env[68233]: ERROR nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [req-21304100-737e-4d30-b225-c75b7477ecbf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21304100-737e-4d30-b225-c75b7477ecbf"}]} [ 607.026845] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a91c655a-21e9-4a75-8d24-83b1f7fdf94d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.052501] env[68233]: DEBUG nova.compute.manager [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Received event network-vif-plugged-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 607.052609] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Acquiring lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.052998] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.053156] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.053358] env[68233]: DEBUG nova.compute.manager [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] No waiting events found dispatching network-vif-plugged-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 607.053628] env[68233]: WARNING nova.compute.manager [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Received unexpected event network-vif-plugged-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 for instance with vm_state building and task_state spawning. [ 607.053804] env[68233]: DEBUG nova.compute.manager [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Received event network-changed-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 607.054054] env[68233]: DEBUG nova.compute.manager [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Refreshing instance network info cache due to event network-changed-db2a881b-a7e3-40d6-9df5-f9280b97cfc9. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 607.054310] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Acquiring lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.054643] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Acquired lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.054848] env[68233]: DEBUG nova.network.neutron [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Refreshing network info cache for port db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 607.057847] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 607.057847] env[68233]: value = "task-2781771" [ 607.057847] env[68233]: _type = "Task" [ 607.057847] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.063097] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 607.069183] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781771, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.084208] env[68233]: INFO nova.scheduler.client.report [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleted allocations for instance ebc4a17b-54fa-49eb-b899-6e339e56f27b [ 607.091411] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 607.091628] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 607.107352] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 607.135093] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 607.404748] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523ab2e7-6451-7e1e-17d7-e773bc8d2c55, 'name': SearchDatastore_Task, 'duration_secs': 0.011504} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.408111] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e540e7ff-4000-4aa2-b594-730f0b112657 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.415992] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 607.415992] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b701a6-8839-4651-0385-fa3f1a4c3cf0" [ 607.415992] env[68233]: _type = "Task" [ 607.415992] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.424317] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b701a6-8839-4651-0385-fa3f1a4c3cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.436372] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781770, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126918} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.437298] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 607.437298] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 607.437298] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 607.495562] env[68233]: INFO nova.compute.manager [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Rebuilding instance [ 607.537914] env[68233]: DEBUG nova.compute.manager [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 607.540773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e82a9ebe-3099-4bfe-aa0e-8fb70a0b9af2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.565652] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095c2a2f-8003-4d27-8ea2-ee5292664cb6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.578678] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7aee3c-95f7-448c-805f-29e486906688 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.581748] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781771, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.609389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e87d7fe-0825-4629-9cb0-3d2fd6484ca2 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "ebc4a17b-54fa-49eb-b899-6e339e56f27b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.722s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.611195] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ddf8173-7530-489f-9098-cd615b812a0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.620996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370a2764-115a-4a8a-ae18-b4aa6506d34a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.638290] env[68233]: DEBUG nova.compute.provider_tree [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 607.680174] env[68233]: DEBUG nova.compute.manager [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 607.932025] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b701a6-8839-4651-0385-fa3f1a4c3cf0, 'name': SearchDatastore_Task, 'duration_secs': 0.020633} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.932025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.932509] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35cbc15b-48d8-4acd-a957-eec3421df1ce/35cbc15b-48d8-4acd-a957-eec3421df1ce.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 607.932635] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94aed7f4-a1ee-47e9-a091-313c501f663b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.945927] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 607.945927] env[68233]: value = "task-2781772" [ 607.945927] env[68233]: _type = "Task" [ 607.945927] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.954585] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781772, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.076872] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781771, 'name': ReconfigVM_Task, 'duration_secs': 0.940691} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.076872] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 38c86c2b-9b2b-482e-b26d-066208467202/38c86c2b-9b2b-482e-b26d-066208467202.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 608.076872] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8af66fe7-4b01-49b1-ab81-bfe5fec03c35 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.083177] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 608.083177] env[68233]: value = "task-2781773" [ 608.083177] env[68233]: _type = "Task" [ 608.083177] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.092614] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781773, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.115837] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 608.141590] env[68233]: DEBUG nova.scheduler.client.report [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 608.213579] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.331285] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.331536] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.431955] env[68233]: DEBUG nova.network.neutron [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updated VIF entry in instance network info cache for port db2a881b-a7e3-40d6-9df5-f9280b97cfc9. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 608.432418] env[68233]: DEBUG nova.network.neutron [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updating instance_info_cache with network_info: [{"id": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "address": "fa:16:3e:d8:f5:e8", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb2a881b-a7", "ovs_interfaceid": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.461813] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781772, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.472215] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 608.472543] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 608.472736] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 608.472948] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 608.473137] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 608.473299] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 608.473526] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 608.473720] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 608.474586] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 
tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 608.474586] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 608.474586] env[68233]: DEBUG nova.virt.hardware [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 608.475670] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eab14d12-c559-4300-9596-401766b3565c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.485961] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d4e4a9-455f-43ed-a1c2-5a7b977d4e00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.503223] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.509171] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.509424] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.509675] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5258d0db-8ea5-49bc-947e-fe2faf42323d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.530373] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.530373] env[68233]: value = "task-2781774" [ 608.530373] env[68233]: _type = "Task" [ 608.530373] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.541475] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781774, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.557421] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 608.557630] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9cc258d4-3f02-4b9d-8334-23ebac17ee69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.569443] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 608.569443] env[68233]: value = "task-2781775" [ 608.569443] env[68233]: _type = "Task" [ 608.569443] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.581051] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781775, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.596480] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781773, 'name': Rename_Task, 'duration_secs': 0.260466} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.596951] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.597253] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-919e84d3-4fa0-470d-8a1d-4fa8d1be12df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.607892] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 608.607892] env[68233]: value = "task-2781776" [ 608.607892] env[68233]: _type = "Task" [ 608.607892] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.623377] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781776, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.641912] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.646267] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.844s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.646680] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 608.650157] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.200s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.654851] env[68233]: INFO nova.compute.claims [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 608.673390] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "6105602a-b8eb-4128-a492-b60a9468018f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.673766] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.674154] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "6105602a-b8eb-4128-a492-b60a9468018f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.674403] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.674628] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.682084] env[68233]: INFO nova.compute.manager [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Terminating instance [ 608.750522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "34889575-95ea-451c-aa59-49a5f30d4e4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.750807] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.751052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.751338] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.751706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.753974] env[68233]: INFO nova.compute.manager [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 
tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Terminating instance [ 608.935855] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f7e9004-d339-4cba-aa29-2984a1cbd465 req-217dab06-07bd-4666-9cb5-2634145dd38f service nova] Releasing lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.962875] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.742745} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.963542] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35cbc15b-48d8-4acd-a957-eec3421df1ce/35cbc15b-48d8-4acd-a957-eec3421df1ce.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 608.963740] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 608.964277] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20b1725f-fe34-447c-8954-57db5d36dae0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.973701] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 608.973701] env[68233]: value = "task-2781777" [ 608.973701] env[68233]: _type = "Task" [ 608.973701] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.984735] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.040538] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781774, 'name': CreateVM_Task, 'duration_secs': 0.354777} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.040801] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.041359] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.041579] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.041938] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.042378] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0df3535-ae8f-4a8b-a8a6-b33718c665b4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.047443] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 609.047443] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5251aeca-47bf-e190-e84e-5026af346215" [ 609.047443] env[68233]: _type = "Task" [ 609.047443] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.056463] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5251aeca-47bf-e190-e84e-5026af346215, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.079382] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781775, 'name': PowerOffVM_Task, 'duration_secs': 0.185731} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.079798] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 609.080611] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.081348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2117364-6c0c-4142-a0f5-8a10e4cfea9a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.088220] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 609.088452] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ea6f5b7-d6b8-4158-82ce-bca34edfb9fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.114881] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 609.115101] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 609.115230] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Deleting the datastore file [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 609.116110] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-431bd447-a148-45dd-81c6-d73d1f0e1130 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.124020] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781776, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.125576] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 609.125576] env[68233]: value = "task-2781779" [ 609.125576] env[68233]: _type = "Task" [ 609.125576] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.134513] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781779, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.157849] env[68233]: DEBUG nova.compute.utils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 609.161540] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 609.161726] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 609.187121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "refresh_cache-6105602a-b8eb-4128-a492-b60a9468018f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.187365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquired lock "refresh_cache-6105602a-b8eb-4128-a492-b60a9468018f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.187509] env[68233]: DEBUG nova.network.neutron [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.228646] env[68233]: DEBUG nova.policy [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95122ece8b8b445aa04349a675f262b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'dbc7604c87d6485097fe5658d68217b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 609.258296] env[68233]: DEBUG nova.compute.manager [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 609.258559] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 609.261581] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d256b4a-9ea9-4b20-9f48-89c7ab76fb9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.272186] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 609.278844] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c78e8b7d-4d2c-4be7-8ab5-f001d4946701 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.283612] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 609.283612] env[68233]: value = "task-2781780" [ 609.283612] env[68233]: _type = "Task" [ 609.283612] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.292725] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781780, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.485705] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071056} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.486110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.487164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5c4bff-277e-48e3-8a8c-ebe6ac485339 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.513912] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 35cbc15b-48d8-4acd-a957-eec3421df1ce/35cbc15b-48d8-4acd-a957-eec3421df1ce.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.514229] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e936e284-b28d-4c73-8bc4-d40e1b0c6782 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.536601] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 609.536601] env[68233]: value = "task-2781785" [ 609.536601] env[68233]: _type = "Task" [ 609.536601] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.546023] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781785, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.558273] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5251aeca-47bf-e190-e84e-5026af346215, 'name': SearchDatastore_Task, 'duration_secs': 0.014993} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.558273] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.558273] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.558273] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.558492] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.558492] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.558492] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9c7582f-2b35-4813-9eec-34f6a51a4b3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.567122] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.567122] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.567461] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a40958f-149b-4efe-8ba2-a3e33f65c682 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.572922] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 609.572922] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52615f8c-beaa-7c7c-e64f-431d254c6f49" [ 609.572922] env[68233]: _type = "Task" [ 609.572922] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.580968] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52615f8c-beaa-7c7c-e64f-431d254c6f49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.619431] env[68233]: DEBUG oslo_vmware.api [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781776, 'name': PowerOnVM_Task, 'duration_secs': 0.879348} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.619723] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.619876] env[68233]: INFO nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Took 11.46 seconds to spawn the instance on the hypervisor. [ 609.623024] env[68233]: DEBUG nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.623024] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365adc00-d713-4525-8117-7ed7bc481074 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.640609] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781779, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224199} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.640609] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 609.640609] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 609.640609] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 609.662697] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 609.715238] env[68233]: DEBUG nova.network.neutron [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.760126] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Successfully created port: cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.775252] env[68233]: DEBUG nova.network.neutron [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.793435] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781780, 'name': PowerOffVM_Task, 'duration_secs': 0.307624} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.794319] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 609.794596] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 609.794826] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7d4c293-8aff-4a86-beb4-c513d1c58cb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.882484] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 609.882693] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 609.883854] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Deleting the datastore file [datastore2] 34889575-95ea-451c-aa59-49a5f30d4e4c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 609.883854] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb8e139e-3dbe-4d06-813e-ff8ac89b7946 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.890344] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for the task: (returnval){ [ 609.890344] env[68233]: value = "task-2781787" [ 609.890344] env[68233]: _type = "Task" [ 609.890344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.902567] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781787, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.046282] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.083686] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52615f8c-beaa-7c7c-e64f-431d254c6f49, 'name': SearchDatastore_Task, 'duration_secs': 0.015966} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.090676] env[68233]: DEBUG nova.compute.manager [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Received event network-changed-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 610.090884] env[68233]: DEBUG nova.compute.manager [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Refreshing instance network info cache due to event network-changed-cd34c1dc-df6e-4115-b9e9-55df77ee36c9. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 610.091136] env[68233]: DEBUG oslo_concurrency.lockutils [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] Acquiring lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.091292] env[68233]: DEBUG oslo_concurrency.lockutils [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] Acquired lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.091454] env[68233]: DEBUG nova.network.neutron [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Refreshing network info cache for port cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.094997] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37f209aa-3dcc-44b2-b42c-2bb68dde0319 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.101436] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 610.101436] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52add52d-3bfc-ad5d-a930-9417ff127eca" [ 610.101436] env[68233]: _type = "Task" [ 610.101436] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.112166] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52add52d-3bfc-ad5d-a930-9417ff127eca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.113861] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-554f6074-b7fa-45b0-9caf-4587a7088a78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.123813] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5b61a3-8952-44c9-b708-425a8fc9ac8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.167549] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e18ae4c-90f2-49ee-bcdc-a6ae6f8ad50e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.173838] env[68233]: INFO nova.compute.manager [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Took 21.27 seconds to build instance. [ 610.181978] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84b2552-2cf8-4d34-a1c5-f63ca42f96c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.200152] env[68233]: DEBUG nova.compute.provider_tree [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.280769] env[68233]: DEBUG nova.compute.manager [req-ddea83b8-c0c3-4b1e-830f-af968501fea2 req-128e16e2-5e90-4b14-b1e9-dc12803b1c78 service nova] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Received event network-vif-deleted-1628cd6e-9d87-4435-9443-623c66f0590e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 610.281293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Releasing lock "refresh_cache-6105602a-b8eb-4128-a492-b60a9468018f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.281667] env[68233]: DEBUG nova.compute.manager [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 610.281853] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 610.283359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f5ffa5-1dbf-4c1c-a89c-3cc976189195 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.293242] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.294035] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d150c3b-6526-4670-b22b-ee293dd3e035 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.303166] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 610.303166] env[68233]: value = "task-2781788" [ 610.303166] env[68233]: _type = "Task" [ 610.303166] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.313852] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781788, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.402037] env[68233]: DEBUG oslo_vmware.api [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Task: {'id': task-2781787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338966} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.402409] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 610.402662] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 610.402856] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 610.403048] env[68233]: INFO nova.compute.manager [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 610.403310] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.403497] env[68233]: DEBUG nova.compute.manager [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 610.403594] env[68233]: DEBUG nova.network.neutron [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 610.550923] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781785, 'name': ReconfigVM_Task, 'duration_secs': 0.858549} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.551337] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 35cbc15b-48d8-4acd-a957-eec3421df1ce/35cbc15b-48d8-4acd-a957-eec3421df1ce.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 610.552254] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-169b2082-46eb-4a54-9933-22dd19e51103 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.560558] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 610.560558] env[68233]: value = "task-2781789" [ 610.560558] env[68233]: _type = "Task" [ 610.560558] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.574044] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781789, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.612259] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52add52d-3bfc-ad5d-a930-9417ff127eca, 'name': SearchDatastore_Task, 'duration_secs': 0.016272} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.612746] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.613155] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.614743] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf2be52e-a51e-420f-a032-fa0117645695 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.622771] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 610.622771] env[68233]: value = "task-2781790" [ 610.622771] env[68233]: _type = "Task" [ 610.622771] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.634031] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.674772] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18a4e075-f2ac-483f-9310-92207099ec4d tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.784s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.678152] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 610.706706] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.706706] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.706706] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.707324] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.707324] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.707324] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.707324] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.707324] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.707638] env[68233]: DEBUG nova.virt.hardware [None 
req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.707638] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.707734] env[68233]: DEBUG nova.virt.hardware [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.708577] env[68233]: DEBUG nova.scheduler.client.report [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 610.714157] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a75803-7f87-419c-9268-2888207f79ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.731088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1d5eb1-b00f-44dd-a1f8-c98b2b6b4358 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.737823] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 610.738159] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] 
Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 610.738462] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 610.738596] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 610.738744] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 610.738893] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 610.739238] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 610.739446] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 610.739625] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 610.739791] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 610.739968] env[68233]: DEBUG nova.virt.hardware [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 610.742035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e8ead5-3277-499e-8885-4a06fe9c2d12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.763568] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 
tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 610.767650] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.769364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.769364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c198fddb-5353-4644-befb-2d9b97db98ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.773289] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c4244780-8da1-4c07-a9ab-648d030bed30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.805284] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.805284] env[68233]: value = "task-2781791" [ 610.805284] env[68233]: _type = "Task" [ 610.805284] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.818834] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781788, 'name': PowerOffVM_Task, 'duration_secs': 0.142865} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.821988] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 610.822201] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 610.822431] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781791, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.822628] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cc75de2-dcab-4509-bdda-8a84dc252446 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.850740] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 610.851125] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 610.851203] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Deleting the datastore file [datastore2] 6105602a-b8eb-4128-a492-b60a9468018f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 610.851454] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db4277a2-18ed-4812-a408-36829444100e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.860404] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for the task: (returnval){ [ 610.860404] env[68233]: value = "task-2781793" [ 610.860404] env[68233]: _type = "Task" [ 610.860404] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.871364] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781793, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.953515] env[68233]: DEBUG nova.network.neutron [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updated VIF entry in instance network info cache for port cd34c1dc-df6e-4115-b9e9-55df77ee36c9. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 610.954080] env[68233]: DEBUG nova.network.neutron [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating instance_info_cache with network_info: [{"id": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "address": "fa:16:3e:17:11:83", "network": {"id": "ea80e702-bc90-4c9c-b2a4-255391807764", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1601733849-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.244", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1939e7276d1c4f23bf462114f85e72a8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f972c061-0cd5-4aed-8cfb-42cc4a08835a", "external-id": "nsx-vlan-transportzone-814", "segmentation_id": 814, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd34c1dc-df", "ovs_interfaceid": "cd34c1dc-df6e-4115-b9e9-55df77ee36c9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.076166] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781789, 'name': Rename_Task, 'duration_secs': 0.178434} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.076425] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.076886] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b88a5a83-983f-4dab-8e20-39405af3169b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.086984] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 611.086984] env[68233]: value = "task-2781794" [ 611.086984] env[68233]: _type = "Task" [ 611.086984] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.100086] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781794, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.134552] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781790, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.178175] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 611.220501] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.221297] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 611.224930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.531s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.226728] env[68233]: INFO nova.compute.claims [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.293509] env[68233]: DEBUG nova.network.neutron [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.318879] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781791, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.370023] env[68233]: DEBUG oslo_vmware.api [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Task: {'id': task-2781793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404522} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.370023] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 611.370178] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 611.371203] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 611.371203] env[68233]: INFO nova.compute.manager [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Took 1.09 seconds to destroy the instance on the hypervisor. [ 611.371203] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.371203] env[68233]: DEBUG nova.compute.manager [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 611.371203] env[68233]: DEBUG nova.network.neutron [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 611.390102] env[68233]: DEBUG nova.network.neutron [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 611.462476] env[68233]: DEBUG oslo_concurrency.lockutils [req-ba2fb79e-37bb-43a1-a8ce-bef9539772e0 req-a310756f-bf9b-46fd-bb8d-54cfc0482453 service nova] Releasing lock "refresh_cache-9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.604969] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781794, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.637071] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781790, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625304} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.637421] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.637732] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.638073] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df41017a-042d-460c-870c-38b61c85f2a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.650653] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 611.650653] env[68233]: value = "task-2781795" [ 611.650653] env[68233]: _type = "Task" [ 611.650653] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.654980] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Successfully updated port: cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.661759] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781795, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.705979] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.735183] env[68233]: DEBUG nova.compute.utils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 611.742760] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 611.742760] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 611.796430] env[68233]: INFO nova.compute.manager [-] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Took 1.39 seconds to deallocate network for instance. [ 611.817915] env[68233]: DEBUG nova.policy [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3bd33b2e2143f8be165a10e4665c7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '963898fb1cae4e6e9438ace9dd437f9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 611.828025] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781791, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.893434] env[68233]: DEBUG nova.network.neutron [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.101020] env[68233]: DEBUG oslo_vmware.api [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2781794, 'name': PowerOnVM_Task, 'duration_secs': 0.566136} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.101020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.101020] env[68233]: INFO nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Took 11.41 seconds to spawn the instance on the hypervisor. [ 612.101020] env[68233]: DEBUG nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.101020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67047c8e-e9e6-4fdd-a41c-4479d2e3457d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.161342] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.161342] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.161342] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.170601] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.270035} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.176292] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.177534] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0019b995-dd82-46db-8ea5-90ffaa8370c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.218770] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfiguring VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.219945] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0eb64c0f-fcd0-4206-b3b0-5d23173550da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.243118] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 612.253623] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 612.253623] env[68233]: value = "task-2781796" [ 612.253623] env[68233]: _type = "Task" [ 612.253623] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.266450] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781796, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.305681] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.325426] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781791, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.395870] env[68233]: INFO nova.compute.manager [-] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Took 1.02 seconds to deallocate network for instance. 
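The records above repeat a single pattern: invoke a vCenter operation (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task, Rename_Task, PowerOnVM_Task), receive a task reference such as task-2781787, then log "Waiting for the task", "progress is N%", and finally "completed successfully". As a rough illustration of that poll-until-complete shape, here is a minimal plain-Python sketch; it is not the oslo.vmware wait_for_task implementation, and fetch_task_info and TaskFailed are hypothetical names introduced only for this example.

# Illustrative sketch only: a generic poll-until-complete loop mirroring the
# "Waiting for the task ... progress is N% ... completed successfully" records
# above. fetch_task_info is a hypothetical callable supplied by the caller;
# this is not the oslo.vmware API.
import time

class TaskFailed(Exception):
    """Raised when a polled task ends in error or never completes."""

def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state; return its result."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)  # e.g. {'state': 'running', 'progress': 14}
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Corresponds to the periodic "Task: {...} progress is N%" DEBUG lines.
        print("Task %s progress is %s%%" % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)
    raise TaskFailed("task %s did not complete within %.0fs" % (task_id, timeout))

Under that reading, the driver-level flows traced in this section (power off, unregister, delete datastore contents, then deallocate network on destroy; copy and extend the cached image disk, reconfigure, rename, power on during spawn) are sequences of such waits interleaved with Neutron cache refreshes and resource-tracker lock activity.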
[ 612.471271] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Successfully created port: a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.627135] env[68233]: INFO nova.compute.manager [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Took 20.45 seconds to build instance. [ 612.711757] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070b8069-cf76-4c68-a98f-ea2cf6ddc903 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.715430] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.723945] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7227e7-c666-4737-adcd-8085951255e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.769809] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef512f8-959e-4a29-976e-0ca5bb651236 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.777655] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781796, 'name': ReconfigVM_Task, 'duration_secs': 0.411075} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.780045] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Reconfigured VM instance instance-00000002 to attach disk [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67/e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 612.781053] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2d7a8a9-09e2-4c6b-9a34-5627cfbe5249 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.785392] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72cc1447-f173-4326-ad00-2493aa0c0fbd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.800614] env[68233]: DEBUG nova.compute.provider_tree [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 612.803048] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 612.803048] env[68233]: value = "task-2781798" [ 612.803048] env[68233]: _type = "Task" [ 612.803048] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.812581] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781798, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.821796] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781791, 'name': CreateVM_Task, 'duration_secs': 1.582254} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.821923] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 612.822274] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.822424] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.822851] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 612.823814] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50993788-e007-4ef8-85b8-0a884ed15f7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.828445] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 612.828445] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529c5a3f-2f92-fb7e-696b-7cd45dc6add7" [ 612.828445] env[68233]: _type = "Task" [ 612.828445] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.837214] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529c5a3f-2f92-fb7e-696b-7cd45dc6add7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.907541] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.913510] env[68233]: DEBUG nova.compute.manager [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Received event network-vif-plugged-cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 612.913510] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.913510] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.913510] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.913681] env[68233]: DEBUG nova.compute.manager [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] No waiting events found dispatching network-vif-plugged-cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 612.915496] env[68233]: WARNING nova.compute.manager [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Received unexpected event network-vif-plugged-cc05db07-a36a-494d-92b6-af58fdd9d143 for instance with vm_state building and task_state spawning. [ 612.915496] env[68233]: DEBUG nova.compute.manager [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Received event network-changed-cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 612.915496] env[68233]: DEBUG nova.compute.manager [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Refreshing instance network info cache due to event network-changed-cc05db07-a36a-494d-92b6-af58fdd9d143. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 612.915496] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.076221] env[68233]: DEBUG nova.compute.manager [req-189d758d-feae-4477-b7be-0855f2881344 req-56505765-0856-4c3e-bd16-359277ec5d89 service nova] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Received event network-vif-deleted-d2aed54a-2ca1-42eb-b9f8-e65ec5f8bc84 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 613.129607] env[68233]: DEBUG oslo_concurrency.lockutils [None req-150caf99-17f8-4d3a-a288-87aff394bc37 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.982s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.157138] env[68233]: DEBUG nova.network.neutron [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.267789] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 613.299607] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 613.299845] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.300235] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 613.300420] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.300420] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 613.300562] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 613.300769] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 613.300923] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 613.301226] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 613.301461] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 613.301671] env[68233]: DEBUG nova.virt.hardware [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 613.303068] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a322b4c-ba4e-4812-a927-607138c73a76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.321270] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781798, 'name': Rename_Task, 'duration_secs': 0.170083} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.326672] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.326775] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-db9ebf70-d798-4abd-9a3c-f63e213b56bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.331548] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd4e9a0-d5f2-4613-a069-07114eeb1f12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.335042] env[68233]: ERROR nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [req-4ecc6465-deef-46fb-93da-96785a02feb1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4ecc6465-deef-46fb-93da-96785a02feb1"}]} [ 613.353423] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Waiting for the task: (returnval){ [ 613.353423] env[68233]: value = "task-2781799" [ 613.353423] env[68233]: _type = "Task" [ 613.353423] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.357306] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529c5a3f-2f92-fb7e-696b-7cd45dc6add7, 'name': SearchDatastore_Task, 'duration_secs': 0.02851} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.358459] env[68233]: DEBUG nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 613.364957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.364957] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.364957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.364957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.365430] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.365867] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08159a61-833b-427f-b064-f51285101415 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.374186] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781799, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.376941] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.376941] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 613.377323] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eb916bd-92b4-41e5-b87d-2ed3f6afca57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.383437] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 613.383437] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f13f86-f3ab-bc7b-9b44-c6e6dfd97cdd" [ 613.383437] env[68233]: _type = "Task" [ 613.383437] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.384505] env[68233]: DEBUG nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 613.384751] env[68233]: DEBUG nova.compute.provider_tree [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 613.396488] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f13f86-f3ab-bc7b-9b44-c6e6dfd97cdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.400399] env[68233]: DEBUG nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 613.425543] env[68233]: DEBUG nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 613.631900] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 613.661124] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.661392] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Instance network_info: |[{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 613.665023] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.665023] env[68233]: DEBUG nova.network.neutron [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Refreshing network info cache for port cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 613.666149] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:9c:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc05db07-a36a-494d-92b6-af58fdd9d143', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 613.675448] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] 
Creating folder: Project (dbc7604c87d6485097fe5658d68217b9). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.680974] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be1389ff-bd1f-4eae-9348-bf09f058e5f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.690570] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created folder: Project (dbc7604c87d6485097fe5658d68217b9) in parent group-v559223. [ 613.690570] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating folder: Instances. Parent ref: group-v559260. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 613.690771] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f79bce-29a8-4fe7-b49b-433f8c106ee4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.704957] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created folder: Instances in parent group-v559260. [ 613.707795] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.710894] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 613.711163] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5453f535-a701-44e1-9558-58d3f78516ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.732365] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 613.732365] env[68233]: value = "task-2781802" [ 613.732365] env[68233]: _type = "Task" [ 613.732365] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.740106] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781802, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.871430] env[68233]: DEBUG oslo_vmware.api [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Task: {'id': task-2781799, 'name': PowerOnVM_Task, 'duration_secs': 0.463143} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.871885] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 613.871964] env[68233]: DEBUG nova.compute.manager [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 613.874605] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7620c3f-c287-4583-9ec4-61ea5b42aa03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.878635] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1318a9bc-ce56-4082-aa51-9a42138980a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.893602] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f631a3-690f-4233-8e6f-2f17e61544aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.900587] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f13f86-f3ab-bc7b-9b44-c6e6dfd97cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.028395} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.901972] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67339046-0e81-449e-a1a6-9df7a514d8f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.932133] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0fcc90-923c-4f4c-84c6-f274ab993c82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.936638] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 613.936638] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a616ca-ceb9-867b-ccf4-bf9de254be52" [ 613.936638] env[68233]: _type = "Task" [ 613.936638] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.945531] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec3a510-ecb5-43a5-b689-c3c85d12716a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.954871] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a616ca-ceb9-867b-ccf4-bf9de254be52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.966604] env[68233]: DEBUG nova.compute.provider_tree [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.132316] env[68233]: DEBUG nova.network.neutron [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updated VIF entry in instance network info cache for port cc05db07-a36a-494d-92b6-af58fdd9d143. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 614.132661] env[68233]: DEBUG nova.network.neutron [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.159601] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.245650] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781802, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.396415] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.451957] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a616ca-ceb9-867b-ccf4-bf9de254be52, 'name': SearchDatastore_Task, 'duration_secs': 0.016718} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.455158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.455158] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.455158] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d10c3491-a43c-4303-8121-c838b6385f86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.459209] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 614.459209] env[68233]: value = "task-2781804" [ 614.459209] env[68233]: _type = "Task" [ 614.459209] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.467182] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781804, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.509338] env[68233]: DEBUG nova.scheduler.client.report [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 30 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 614.509630] env[68233]: DEBUG nova.compute.provider_tree [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 30 to 31 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 614.509775] env[68233]: DEBUG nova.compute.provider_tree [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.635315] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a30b8a3-ba74-4895-8037-7ab4bcafb1de req-f06f685d-87b3-4ac7-9ef2-89475692ce1c service nova] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.755259] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781802, 'name': CreateVM_Task, 'duration_secs': 0.604251} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.755418] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 614.759138] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.759371] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.759702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 614.760325] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc970329-06f3-465a-8f9c-99cb6da8f34d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.765971] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 614.765971] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110bfd-c6e9-ae1d-379d-b6864fe70df5" [ 614.765971] env[68233]: _type = "Task" [ 614.765971] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.774780] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110bfd-c6e9-ae1d-379d-b6864fe70df5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.798549] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Successfully updated port: a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.972149] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781804, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.018642] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.791s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.018642] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 615.021718] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.913s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.023261] env[68233]: INFO nova.compute.claims [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.279743] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110bfd-c6e9-ae1d-379d-b6864fe70df5, 'name': SearchDatastore_Task, 'duration_secs': 0.014153} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.280076] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.280453] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.280918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.280918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.281059] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.281337] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1a5e406-60cd-48d4-8a81-c6b9300c96f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.292870] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.293119] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 615.293897] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18908d2b-4d8b-43ea-a16f-c24fb8ca0b40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.299285] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.299409] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.299559] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.303940] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 615.303940] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254cb7a-ba57-ab6a-8fe9-ec3a836626d2" [ 615.303940] env[68233]: _type = "Task" [ 615.303940] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.317060] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254cb7a-ba57-ab6a-8fe9-ec3a836626d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.471622] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.724613} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.471945] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 615.472284] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 615.472484] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ba6f2ab6-934a-49b2-bebe-95706c07812d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.479832] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 615.479832] env[68233]: value = "task-2781805" [ 615.479832] env[68233]: _type = "Task" [ 615.479832] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.491140] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781805, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.530993] env[68233]: DEBUG nova.compute.utils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 615.533175] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 615.533291] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 615.609912] env[68233]: DEBUG nova.policy [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 615.816784] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254cb7a-ba57-ab6a-8fe9-ec3a836626d2, 'name': SearchDatastore_Task, 'duration_secs': 0.024327} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.817198] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c2690f7-4c69-4abc-b9fb-601ee3e0ebf3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.825116] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 615.825116] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52092420-c8b0-68ef-f9f2-796b4f74b9b8" [ 615.825116] env[68233]: _type = "Task" [ 615.825116] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.833679] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52092420-c8b0-68ef-f9f2-796b4f74b9b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.845370] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.996743] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781805, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070548} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.997050] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.997827] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e66e078-0b7d-4401-8197-77ebf1a3c823 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.021835] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.022173] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa5d2f72-8eec-4937-a4ee-4ec90ec66f8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.049885] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.057289] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 616.057289] env[68233]: value = "task-2781806" [ 616.057289] env[68233]: _type = "Task" [ 616.057289] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.074736] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781806, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.114371] env[68233]: DEBUG nova.compute.manager [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Received event network-vif-plugged-a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 616.114757] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Acquiring lock "87385201-3118-4a8e-9739-db3b431566c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.114757] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Lock "87385201-3118-4a8e-9739-db3b431566c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.115053] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Lock "87385201-3118-4a8e-9739-db3b431566c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.115147] env[68233]: DEBUG nova.compute.manager [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] No waiting events found dispatching network-vif-plugged-a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 616.115399] env[68233]: WARNING nova.compute.manager [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Received unexpected event network-vif-plugged-a38db034-3553-49b5-afdc-1b75d897f720 for instance with vm_state building and task_state spawning. [ 616.115503] env[68233]: DEBUG nova.compute.manager [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Received event network-changed-a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 616.115679] env[68233]: DEBUG nova.compute.manager [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Refreshing instance network info cache due to event network-changed-a38db034-3553-49b5-afdc-1b75d897f720.
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 616.115897] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Acquiring lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.194066] env[68233]: DEBUG nova.network.neutron [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updating instance_info_cache with network_info: [{"id": "a38db034-3553-49b5-afdc-1b75d897f720", "address": "fa:16:3e:c1:7e:7e", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa38db034-35", "ovs_interfaceid": "a38db034-3553-49b5-afdc-1b75d897f720", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.209150] env[68233]: DEBUG nova.compute.manager [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Received event network-changed-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 616.209150] env[68233]: DEBUG nova.compute.manager [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Refreshing instance network info cache due to event network-changed-db2a881b-a7e3-40d6-9df5-f9280b97cfc9. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 616.209704] env[68233]: DEBUG oslo_concurrency.lockutils [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] Acquiring lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.210061] env[68233]: DEBUG oslo_concurrency.lockutils [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] Acquired lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.210448] env[68233]: DEBUG nova.network.neutron [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Refreshing network info cache for port db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.232926] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Successfully created port: 9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.339421] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52092420-c8b0-68ef-f9f2-796b4f74b9b8, 'name': SearchDatastore_Task, 'duration_secs': 0.050081} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.341870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.341870] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 616.341870] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b31bcc48-70be-4da7-a109-2f54ffc83c57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.352034] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 616.352034] env[68233]: value = "task-2781807" [ 616.352034] env[68233]: _type = "Task" [ 616.352034] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.361047] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781807, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.578335] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781806, 'name': ReconfigVM_Task, 'duration_secs': 0.468261} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.578689] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a/68a4e635-381d-4dc2-879c-5581cd5e189a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 616.580268] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b9ecfd8-354f-49c8-8917-b5557652bc97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.590062] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.590062] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.591778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.591778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.003s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.592168] env[68233]: DEBUG oslo_concurrency.lockutils [None
req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.594076] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 616.594076] env[68233]: value = "task-2781809" [ 616.594076] env[68233]: _type = "Task" [ 616.594076] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.599117] env[68233]: INFO nova.compute.manager [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Terminating instance [ 616.619488] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781809, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.633585] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d72619b-a135-49ce-b884-bda02ab19ab1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.644620] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02515570-3af6-46c6-97b5-afb818bb1fef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.681834] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cc3bf0-6658-4867-88aa-959e375ac1cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.691175] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2b4ce22-f6c0-402d-82d7-d50c127c0121 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.709028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.709028] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Instance network_info: |[{"id": "a38db034-3553-49b5-afdc-1b75d897f720", "address": "fa:16:3e:c1:7e:7e", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address":
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa38db034-35", "ovs_interfaceid": "a38db034-3553-49b5-afdc-1b75d897f720", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 616.709301] env[68233]: DEBUG nova.compute.provider_tree [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.710258] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Acquired lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.710456] env[68233]: DEBUG nova.network.neutron [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Refreshing network info cache for port a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.711669] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:7e:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0dd3c126-9d86-4f9a-b81c-e9627c7a5401', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a38db034-3553-49b5-afdc-1b75d897f720', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.720870] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating folder: Project (963898fb1cae4e6e9438ace9dd437f9e). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.723926] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aaa52c40-d04f-4fb0-a32b-23dcf5e5241d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.736975] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created folder: Project (963898fb1cae4e6e9438ace9dd437f9e) in parent group-v559223. [ 616.736975] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating folder: Instances. Parent ref: group-v559263. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.736975] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9feec6b5-2054-4d31-ba16-57e0deca5897 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.763340] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created folder: Instances in parent group-v559263. [ 616.763340] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.763340] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.763340] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fd11f66-7fa5-4cfb-8fb6-4b05a16235d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.771245] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.771245] env[68233]: value = "task-2781812" [ 616.771245] env[68233]: _type = "Task" [ 616.771245] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.787249] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781812, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.869554] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781807, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.074334] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.106541] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=<?>,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:47:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 617.106697] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.106814] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 617.107074] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.107252] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 617.107399] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 617.107606] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 617.107763] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479
tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 617.107929] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 617.108313] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 617.108313] env[68233]: DEBUG nova.virt.hardware [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 617.109169] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde23e77-a583-4495-ae2b-f4bc5e83e0e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.112655] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "refresh_cache-e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.112832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquired lock "refresh_cache-e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.113062] env[68233]: DEBUG nova.network.neutron [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.121552] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781809, 'name': Rename_Task, 'duration_secs': 0.271708} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.123870] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.124247] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2cb1cad-9f4c-48bf-a6b9-251694700f2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.129383] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80903c1f-254c-4409-a666-9dcfb4b25381 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.150424] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 617.150424] env[68233]: value = "task-2781813" [ 617.150424] env[68233]: _type = "Task" [ 617.150424] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.160641] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781813, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.223638] env[68233]: DEBUG nova.network.neutron [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updated VIF entry in instance network info cache for port db2a881b-a7e3-40d6-9df5-f9280b97cfc9. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 617.223638] env[68233]: DEBUG nova.network.neutron [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updating instance_info_cache with network_info: [{"id": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "address": "fa:16:3e:d8:f5:e8", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdb2a881b-a7", "ovs_interfaceid": "db2a881b-a7e3-40d6-9df5-f9280b97cfc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.225497] env[68233]: DEBUG nova.scheduler.client.report [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 617.281454] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781812, 'name': CreateVM_Task, 'duration_secs': 0.488537} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.281948] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.282637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.282885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.283117] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.283586] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ecf16a-f50b-4254-b379-22b818fd03b4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.288110] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 617.288110] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a472-38df-2bd3-2ef9-6ec3920661f2" [ 617.288110] env[68233]: _type = "Task" [ 617.288110] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.296073] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a472-38df-2bd3-2ef9-6ec3920661f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.361844] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564218} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.362223] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 617.362490] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 617.362567] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-418f66a4-6c15-4aee-a0f3-e547f8529698 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.370567] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 617.370567] env[68233]: value = "task-2781814" [ 617.370567] env[68233]: _type = "Task" [ 617.370567] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.383840] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.600285] env[68233]: DEBUG nova.network.neutron [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updated VIF entry in instance network info cache for port a38db034-3553-49b5-afdc-1b75d897f720. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 617.600285] env[68233]: DEBUG nova.network.neutron [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updating instance_info_cache with network_info: [{"id": "a38db034-3553-49b5-afdc-1b75d897f720", "address": "fa:16:3e:c1:7e:7e", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa38db034-35", "ovs_interfaceid": "a38db034-3553-49b5-afdc-1b75d897f720", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.656413] env[68233]: DEBUG nova.network.neutron [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.666333] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781813, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.737366] env[68233]: DEBUG oslo_concurrency.lockutils [req-41741bc6-b11b-44ec-bd6b-6c67af3f5bba req-662d81b1-c84e-4fe1-a501-77867c011e39 service nova] Releasing lock "refresh_cache-35cbc15b-48d8-4acd-a957-eec3421df1ce" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.738492] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.717s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.739042] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 617.746688] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.063s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.746920] env[68233]: DEBUG nova.objects.instance [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 617.802794] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a472-38df-2bd3-2ef9-6ec3920661f2, 'name': SearchDatastore_Task, 'duration_secs': 0.015709} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.803111] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.803342] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.803598] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.803747] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.803924] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.804190] env[68233]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-158975f5-2e77-4dae-8cf1-94ed6f46f4cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.816849] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.817080] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 617.817780] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efd20033-2dfe-495d-8094-88fee2bb9fe8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.823692] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 617.823692] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f44970-9ba6-c066-2fa1-d8a35a4403fc" [ 617.823692] env[68233]: _type = "Task" [ 617.823692] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.836263] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f44970-9ba6-c066-2fa1-d8a35a4403fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.886335] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.3025} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.886335] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 617.886335] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd11926-b8e0-4935-9c3d-9ba14180966c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.925146] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 617.926387] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38a8eaee-5ba9-4358-83d9-aa6125efe343 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.952823] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 617.952823] env[68233]: value = "task-2781815" [ 617.952823] env[68233]: _type = "Task" [ 617.952823] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.966762] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781815, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.967769] env[68233]: DEBUG nova.network.neutron [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.102957] env[68233]: DEBUG oslo_concurrency.lockutils [req-36a7274a-1363-4b72-8a1e-1401da56a85c req-c092f992-fff6-43f2-8e11-2bdf8bca8912 service nova] Releasing lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.169087] env[68233]: DEBUG oslo_vmware.api [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781813, 'name': PowerOnVM_Task, 'duration_secs': 0.970757} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.169087] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 618.170362] env[68233]: DEBUG nova.compute.manager [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 618.171236] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49dae45-c767-407a-9833-e862b5049101 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.248015] env[68233]: DEBUG nova.compute.utils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 618.249776] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 618.249941] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 618.264176] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Successfully updated port: 9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.331311] env[68233]: DEBUG nova.policy [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '236b431e2f194bb2a4bc0090158d54b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e90f2c4afe61469fa4a081e470058fc7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 618.337037] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f44970-9ba6-c066-2fa1-d8a35a4403fc, 
'name': SearchDatastore_Task, 'duration_secs': 0.03134} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.337793] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21bcd73e-4c16-4677-9fbe-54e92318cdc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.344453] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 618.344453] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b0c0-5d2b-b4a4-4b86-7a49cc25694b" [ 618.344453] env[68233]: _type = "Task" [ 618.344453] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.353743] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b0c0-5d2b-b4a4-4b86-7a49cc25694b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.465123] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781815, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.471626] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Releasing lock "refresh_cache-e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.472067] env[68233]: DEBUG nova.compute.manager [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 618.476076] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 618.476076] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f7015e-14c3-44ad-9886-7091945d47b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.484150] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 618.484702] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2c32d58-36e8-4368-b25d-0b50a21c347c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.495070] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 618.495070] env[68233]: value = "task-2781816" [ 618.495070] env[68233]: _type = "Task" [ 618.495070] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.509759] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781816, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.684019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "769956c6-7824-41db-9779-fc1b5f53dd94" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.684019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.699500] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.755065] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.758588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3077b1e4-8250-4afd-9004-9395ab9884d1 tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.759948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.319s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.762574] env[68233]: INFO nova.compute.claims [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.768391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.768391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 
tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.768391] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.838305] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Successfully created port: f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.856873] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b0c0-5d2b-b4a4-4b86-7a49cc25694b, 'name': SearchDatastore_Task, 'duration_secs': 0.013465} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.859167] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.859452] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/87385201-3118-4a8e-9739-db3b431566c5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 618.859711] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-725796ca-fb57-400f-bf90-38e86aa8c175 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.873386] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 618.873386] env[68233]: value = "task-2781818" [ 618.873386] env[68233]: _type = "Task" [ 618.873386] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.889748] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781818, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.970777] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781815, 'name': ReconfigVM_Task, 'duration_secs': 0.666347} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.971198] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 618.972055] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-567931f3-eda6-4f2f-9958-afa5f524e77b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.985757] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 618.985757] env[68233]: value = "task-2781819" [ 618.985757] env[68233]: _type = "Task" [ 618.985757] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.009065] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781819, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.016701] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781816, 'name': PowerOffVM_Task, 'duration_secs': 0.203356} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.017075] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.017243] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.017547] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1760432-b573-4b14-992d-61509d713b98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.055236] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.055563] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.055847] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Deleting the datastore file [datastore2] e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.056558] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4037fd0b-8cdc-402a-8b69-c2b75a46eabc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.069485] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for the task: (returnval){ [ 619.069485] env[68233]: value = "task-2781821" [ 619.069485] env[68233]: _type = "Task" [ 619.069485] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.088402] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781821, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.221131] env[68233]: DEBUG nova.compute.manager [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Received event network-vif-plugged-9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 619.222217] env[68233]: DEBUG oslo_concurrency.lockutils [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] Acquiring lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.222863] env[68233]: DEBUG oslo_concurrency.lockutils [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.223265] env[68233]: DEBUG oslo_concurrency.lockutils [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.223506] env[68233]: DEBUG nova.compute.manager [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] No waiting events found dispatching network-vif-plugged-9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.223787] env[68233]: WARNING nova.compute.manager [req-6076d0ce-3b30-4eee-aa74-88bfdfaf60c0 req-5e65c78c-650d-4e00-82f1-7bbf2bf5a8b2 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Received unexpected event network-vif-plugged-9bdac2bf-51ef-46a3-ad11-6c893bcc0570 for instance with vm_state building and task_state spawning. [ 619.361691] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 619.393798] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781818, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.450615] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.452278] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.505883] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781819, 'name': Rename_Task, 'duration_secs': 0.491658} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.508740] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 619.508740] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f56f7b4-6036-4725-9bb3-d86394507226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.521483] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 619.521483] env[68233]: value = "task-2781822" [ 619.521483] env[68233]: _type = "Task" [ 619.521483] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.536684] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781822, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.582627] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.773038] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.802720] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.803087] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.803353] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.803619] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.803803] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.804030] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.804281] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.805415] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
619.805808] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.806061] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.806420] env[68233]: DEBUG nova.virt.hardware [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.807319] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a93573c-c8c7-4dbc-a2f9-6ead3905d77f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.819452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0a965b-d3e7-43b6-b9ba-97ac9f912b4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.889132] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781818, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.751814} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.890477] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/87385201-3118-4a8e-9739-db3b431566c5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.890822] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.891356] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fff5a81-5f4e-49b2-836d-41bdeae0fe34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.901280] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 619.901280] env[68233]: value = "task-2781823" [ 619.901280] env[68233]: _type = "Task" [ 619.901280] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.916275] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781823, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.038312] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781822, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.082569] env[68233]: DEBUG oslo_vmware.api [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Task: {'id': task-2781821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.535766} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.085901] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.087305] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.087305] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.087305] env[68233]: INFO nova.compute.manager [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Took 1.61 seconds to destroy the instance on the hypervisor. [ 620.087305] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.087874] env[68233]: DEBUG nova.compute.manager [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 620.088085] env[68233]: DEBUG nova.network.neutron [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 620.100194] env[68233]: DEBUG nova.network.neutron [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Updating instance_info_cache with network_info: [{"id": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "address": "fa:16:3e:f7:7b:a4", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap9bdac2bf-51", "ovs_interfaceid": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.116320] env[68233]: DEBUG nova.network.neutron [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 620.255242] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529d7767-0c7a-4cb3-989a-bc6aaa8e29bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.263326] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c488c60-0c00-4dce-8759-e9d775961b6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.301707] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8c3022-873b-4a6e-8617-22ac1a28e352 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.311125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d50d4c-dfc1-4684-9028-b6852aba5eb7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.330378] env[68233]: DEBUG nova.compute.provider_tree [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.412603] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073686} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.412876] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.413689] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7299b11-224b-4a68-b112-86f96799a3d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.441490] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/87385201-3118-4a8e-9739-db3b431566c5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.442124] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-781d3159-7496-4e8a-98d3-b27ae3beb107 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.466041] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 620.466041] env[68233]: value = "task-2781824" [ 620.466041] env[68233]: _type = "Task" [ 620.466041] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.485022] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781824, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.534857] env[68233]: DEBUG oslo_vmware.api [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2781822, 'name': PowerOnVM_Task, 'duration_secs': 0.934138} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.535272] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 620.535489] env[68233]: INFO nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Took 9.86 seconds to spawn the instance on the hypervisor. 
[ 620.535740] env[68233]: DEBUG nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 620.536779] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134ea0b5-a13c-48a3-b9cd-06a1fb0a7e51 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.604600] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.604943] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Instance network_info: |[{"id": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "address": "fa:16:3e:f7:7b:a4", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bdac2bf-51", "ovs_interfaceid": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 620.605368] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f7:7b:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bdac2bf-51ef-46a3-ad11-6c893bcc0570', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 620.615925] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating folder: Project (3636e6c8e70e4996ac83a672732a1ff6). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.617025] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c23b6d3c-5017-4b63-90aa-406f8d414b61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.619378] env[68233]: DEBUG nova.network.neutron [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.704648] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created folder: Project (3636e6c8e70e4996ac83a672732a1ff6) in parent group-v559223. [ 620.705563] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating folder: Instances. Parent ref: group-v559267. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 620.706413] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-183185af-cdf2-4a8c-8a23-dd5af10e7c74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.723196] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created folder: Instances in parent group-v559267. [ 620.724246] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 620.724246] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 620.724535] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c063127c-ac2d-4a5b-9cd7-9c0a670bdbd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.756319] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 620.756319] env[68233]: value = "task-2781828" [ 620.756319] env[68233]: _type = "Task" [ 620.756319] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.769956] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781828, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.836022] env[68233]: DEBUG nova.scheduler.client.report [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.979727] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781824, 'name': ReconfigVM_Task, 'duration_secs': 0.315086} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.980375] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/87385201-3118-4a8e-9739-db3b431566c5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 620.981230] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2d49e08-35da-49e4-b65e-ca3adc9de15c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.990897] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 620.990897] env[68233]: value = "task-2781829" [ 620.990897] env[68233]: _type = "Task" [ 620.990897] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.010356] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781829, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.062485] env[68233]: INFO nova.compute.manager [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Took 26.43 seconds to build instance. 
[ 621.072474] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Successfully updated port: f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 621.122793] env[68233]: INFO nova.compute.manager [-] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Took 1.03 seconds to deallocate network for instance. [ 621.267688] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781828, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.343803] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.344455] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.349188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.991s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.349521] env[68233]: DEBUG nova.objects.instance [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lazy-loading 'resources' on Instance uuid eb5dc742-fa8f-4bac-89cb-afa57b5abe12 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 621.408089] env[68233]: DEBUG nova.compute.manager [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Received event network-vif-plugged-f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 621.408455] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] Acquiring lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.408624] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.408661] env[68233]: DEBUG 
oslo_concurrency.lockutils [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.408878] env[68233]: DEBUG nova.compute.manager [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] No waiting events found dispatching network-vif-plugged-f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 621.413323] env[68233]: WARNING nova.compute.manager [req-d5eb078f-ecce-4bc2-9f97-ba951595e0d6 req-ed94184c-bf23-43e3-8397-4bfd176c4215 service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Received unexpected event network-vif-plugged-f1ca2437-5a7d-4e37-9f83-6cbec685618e for instance with vm_state building and task_state spawning. [ 621.503188] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781829, 'name': Rename_Task, 'duration_secs': 0.37628} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.503684] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 621.508674] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84a26a5f-fcad-4410-8620-1ed8040dad8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.517273] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 621.517273] env[68233]: value = "task-2781830" [ 621.517273] env[68233]: _type = "Task" [ 621.517273] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.527051] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781830, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.570699] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff614522-1d2d-4edb-b7a4-a3f4cac20f0c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.948s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.583745] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.583745] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.583745] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 621.634878] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.769817] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781828, 'name': CreateVM_Task, 'duration_secs': 0.681284} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.770067] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 621.771447] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.771856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.772846] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 621.773581] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27f3e9ec-076a-4424-b173-6f7e170c2d4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.780345] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 621.780345] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521b6a4c-bf8e-0fa7-ca42-2a2db3b55ca0" [ 621.780345] env[68233]: _type = "Task" [ 621.780345] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.789554] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521b6a4c-bf8e-0fa7-ca42-2a2db3b55ca0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.853980] env[68233]: DEBUG nova.compute.utils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 621.864293] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 621.864493] env[68233]: DEBUG nova.network.neutron [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.868848] env[68233]: DEBUG nova.compute.manager [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Received event network-changed-9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 621.868848] env[68233]: DEBUG nova.compute.manager [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Refreshing instance network info cache due to event network-changed-9bdac2bf-51ef-46a3-ad11-6c893bcc0570. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 621.868848] env[68233]: DEBUG oslo_concurrency.lockutils [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] Acquiring lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.868848] env[68233]: DEBUG oslo_concurrency.lockutils [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] Acquired lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.868848] env[68233]: DEBUG nova.network.neutron [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Refreshing network info cache for port 9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 621.875954] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "68a4e635-381d-4dc2-879c-5581cd5e189a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.876217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.876405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "68a4e635-381d-4dc2-879c-5581cd5e189a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.876584] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.876785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.884020] env[68233]: INFO nova.compute.manager [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Terminating instance [ 621.957303] env[68233]: DEBUG nova.policy [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4283f9183a5f4e06b8479120055a3f3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cabc700bdda94bfe87c4a418aeb40e25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 622.035045] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781830, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.075103] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 622.295094] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521b6a4c-bf8e-0fa7-ca42-2a2db3b55ca0, 'name': SearchDatastore_Task, 'duration_secs': 0.011778} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.295411] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.295639] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.296646] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.296646] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.296646] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.296646] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0222abd-6fcb-4dad-8ecb-c88b13dec535 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.306846] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.307177] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.310354] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef5cebc4-09e8-4600-b6b1-c13668d8f123 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.317650] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 622.317650] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d55bde-9c3c-ad3b-baff-f34f8a3e51d4" [ 622.317650] env[68233]: _type = "Task" [ 622.317650] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.332468] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d55bde-9c3c-ad3b-baff-f34f8a3e51d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.356466] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.364755] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.384184] env[68233]: DEBUG nova.compute.manager [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.385454] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98219b1-3db7-4406-92df-a12cf749ec89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.394022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "refresh_cache-68a4e635-381d-4dc2-879c-5581cd5e189a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.394022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquired lock "refresh_cache-68a4e635-381d-4dc2-879c-5581cd5e189a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.394022] env[68233]: DEBUG nova.network.neutron [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.397050] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963c2aef-dd03-47bc-b121-0fb1c50fb018 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.414632] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf15b583-23fd-42b1-a512-cf1a80734b81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.456322] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d7f677-23c9-4750-9f56-726f4fce35e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.467418] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9448d1ff-90de-44f9-90a9-cacd6137bf0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.484514] env[68233]: DEBUG nova.compute.provider_tree [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.529550] env[68233]: DEBUG oslo_vmware.api [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781830, 'name': 
PowerOnVM_Task, 'duration_secs': 0.520562} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.529955] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 622.530239] env[68233]: INFO nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Took 9.26 seconds to spawn the instance on the hypervisor. [ 622.530863] env[68233]: DEBUG nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.531435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec676be1-06cf-4b8d-89a0-42dd247625f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.596502] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.615417] env[68233]: DEBUG nova.network.neutron [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Updating instance_info_cache with network_info: [{"id": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "address": "fa:16:3e:d2:21:20", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ca2437-5a", "ovs_interfaceid": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.720559] env[68233]: DEBUG nova.network.neutron [None 
req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Successfully created port: c00757db-7911-4503-932f-f300752512de {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.830315] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d55bde-9c3c-ad3b-baff-f34f8a3e51d4, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.833247] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4642289f-263a-4b82-9bad-c13a5d8ba268 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.839932] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 622.839932] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524584c4-7f77-de8f-a4bb-86540940db8a" [ 622.839932] env[68233]: _type = "Task" [ 622.839932] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.851796] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524584c4-7f77-de8f-a4bb-86540940db8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.913010] env[68233]: INFO nova.compute.manager [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] instance snapshotting [ 622.916649] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb22886-98d2-490b-96ac-1853a411af44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.945682] env[68233]: DEBUG nova.network.neutron [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.949012] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6731ee9-8967-4376-a22a-079e8638e0ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.987465] env[68233]: DEBUG nova.scheduler.client.report [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 623.053939] env[68233]: INFO nova.compute.manager [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Took 25.63 seconds to build instance. [ 623.074091] env[68233]: DEBUG nova.network.neutron [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.118647] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.118996] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Instance network_info: |[{"id": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "address": "fa:16:3e:d2:21:20", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ca2437-5a", "ovs_interfaceid": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 623.119453] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d2:21:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f1ca2437-5a7d-4e37-9f83-6cbec685618e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 623.130617] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Creating folder: Project (e90f2c4afe61469fa4a081e470058fc7). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.131037] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-91b6eeb8-c4c7-4bd8-b547-8045ad0242c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.149031] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Created folder: Project (e90f2c4afe61469fa4a081e470058fc7) in parent group-v559223. [ 623.149031] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Creating folder: Instances. Parent ref: group-v559270. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 623.149031] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a47ef731-3e55-4b06-bcee-96b05954a82e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.159341] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Created folder: Instances in parent group-v559270. [ 623.159612] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.159808] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 623.160029] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d95475e-0919-4488-b0dc-1fbd0ec13173 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.184487] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 623.184487] env[68233]: value = "task-2781834" [ 623.184487] env[68233]: _type = "Task" [ 623.184487] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.196011] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781834, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.358627] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524584c4-7f77-de8f-a4bb-86540940db8a, 'name': SearchDatastore_Task, 'duration_secs': 0.010636} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.359022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.359372] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 11ec9800-fa7e-4dbd-bdc1-63d0b496589f/11ec9800-fa7e-4dbd-bdc1-63d0b496589f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.359688] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeeca3a2-9050-43d7-af6a-a817491b1146 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.371838] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 623.371838] env[68233]: value = "task-2781835" [ 623.371838] env[68233]: _type = "Task" [ 623.371838] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.382642] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 623.388451] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.413548] env[68233]: DEBUG nova.network.neutron [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Updated VIF entry in instance network info cache for port 9bdac2bf-51ef-46a3-ad11-6c893bcc0570. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 623.413823] env[68233]: DEBUG nova.network.neutron [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Updating instance_info_cache with network_info: [{"id": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "address": "fa:16:3e:f7:7b:a4", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bdac2bf-51", "ovs_interfaceid": "9bdac2bf-51ef-46a3-ad11-6c893bcc0570", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.427857] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 623.428110] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc 
tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.428269] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 623.428447] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.428587] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 623.428726] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 623.428936] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 623.429088] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 623.429287] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 623.429451] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 623.429622] env[68233]: DEBUG nova.virt.hardware [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 623.430534] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c3a03d-0dc2-44a5-8440-fc37644cb4f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.440629] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6519c561-584b-470a-8930-a4c0d12d792d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.465301] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 623.465684] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-11646f81-750e-4662-956c-3c0c64392eee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.476639] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 623.476639] env[68233]: value = "task-2781836" [ 623.476639] env[68233]: _type = "Task" [ 623.476639] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.486577] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781836, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.494751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.498033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.454s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.503296] env[68233]: INFO nova.compute.claims [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.532227] env[68233]: INFO nova.scheduler.client.report [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Deleted allocations for instance eb5dc742-fa8f-4bac-89cb-afa57b5abe12 [ 623.556232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1243d49f-3203-4927-9024-253305122108 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
held 27.149s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.578015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Releasing lock "refresh_cache-68a4e635-381d-4dc2-879c-5581cd5e189a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.578444] env[68233]: DEBUG nova.compute.manager [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 623.578573] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 623.580504] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2a484f-2ceb-4cd2-a9e5-b8e74ba3ca3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.596592] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 623.596592] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbccf18f-1744-4b44-b766-881ffd314380 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.609913] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 623.609913] env[68233]: value = "task-2781837" [ 623.609913] env[68233]: _type = "Task" [ 623.609913] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.623766] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781837, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.699048] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781834, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.887237] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781835, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.919990] env[68233]: DEBUG oslo_concurrency.lockutils [req-45f3899f-b17d-4036-add1-a3bc479c5f64 req-6aad4051-75dd-48d3-95f1-7a66cf0422e7 service nova] Releasing lock "refresh_cache-11ec9800-fa7e-4dbd-bdc1-63d0b496589f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.991847] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781836, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.040797] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8aa510-12ad-407f-99d1-9756cae8946b tempest-ServerDiagnosticsTest-725888869 tempest-ServerDiagnosticsTest-725888869-project-member] Lock "eb5dc742-fa8f-4bac-89cb-afa57b5abe12" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.051s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.059875] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 624.128196] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781837, 'name': PowerOffVM_Task, 'duration_secs': 0.220462} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.128477] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.128640] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.129403] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e80f3947-f8ec-417a-9e21-e3686a539449 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.161328] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.162364] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.162364] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Deleting the datastore file [datastore2] 68a4e635-381d-4dc2-879c-5581cd5e189a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.163937] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d13f96e5-f463-40aa-a221-1370cc75d5f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.171751] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for the task: (returnval){ [ 624.171751] env[68233]: value = "task-2781839" [ 624.171751] env[68233]: _type = "Task" [ 624.171751] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.181749] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.196070] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781834, 'name': CreateVM_Task, 'duration_secs': 0.53665} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.196854] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 624.196986] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.197045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.197374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 624.197631] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60215b1c-da67-432b-9980-46178d394d52 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.205812] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 624.205812] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526feb21-f2ef-cc82-9163-910976a34e08" [ 624.205812] env[68233]: _type = "Task" [ 624.205812] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.215105] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526feb21-f2ef-cc82-9163-910976a34e08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.382843] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573538} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.383372] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 11ec9800-fa7e-4dbd-bdc1-63d0b496589f/11ec9800-fa7e-4dbd-bdc1-63d0b496589f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.383631] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.383733] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68ca76bd-359c-400c-afc2-d3d201271501 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.394074] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 624.394074] env[68233]: value = "task-2781840" [ 624.394074] env[68233]: _type = "Task" [ 624.394074] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.406508] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.489682] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781836, 'name': CreateSnapshot_Task, 'duration_secs': 0.932792} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.489909] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 624.491767] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe0e7df-2713-41c9-a2f5-eb13db778e3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.587260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.610328] env[68233]: DEBUG nova.network.neutron [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Successfully updated port: c00757db-7911-4503-932f-f300752512de {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 624.689651] env[68233]: DEBUG oslo_vmware.api [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Task: {'id': task-2781839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131051} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.689889] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 624.690080] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 624.690287] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.690462] env[68233]: INFO nova.compute.manager [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 624.690704] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.690886] env[68233]: DEBUG nova.compute.manager [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 624.690979] env[68233]: DEBUG nova.network.neutron [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.710922] env[68233]: DEBUG nova.network.neutron [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.722426] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526feb21-f2ef-cc82-9163-910976a34e08, 'name': SearchDatastore_Task, 'duration_secs': 0.011237} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.722769] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.722980] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 624.723879] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.724045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.724248] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 624.724514] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e0d33bdf-376c-4e4c-8a70-c69d50e8c686 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.738250] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 624.738397] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 624.739110] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d7fbc0-c898-4809-89f4-8c724fa4e239 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.747216] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 624.747216] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a8606-f52d-7a6d-6114-c3c640fb2ef7" [ 624.747216] env[68233]: _type = "Task" [ 624.747216] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.756480] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a8606-f52d-7a6d-6114-c3c640fb2ef7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.906644] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.185069} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.908168] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.908479] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98d7d3e7-46dd-477f-a52e-2dfb2e124313 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.944724] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 11ec9800-fa7e-4dbd-bdc1-63d0b496589f/11ec9800-fa7e-4dbd-bdc1-63d0b496589f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.946690] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce2358d0-cae2-4189-baee-4f8f21064da7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.964691] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91556f6c-6841-4d82-bcfb-1f07cc4cd612 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.983305] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0542fc-2a9e-4b30-a339-a3bee83872dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.986850] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 624.986850] env[68233]: value = "task-2781842" [ 624.986850] env[68233]: _type = "Task" [ 624.986850] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.024594] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 625.028851] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5822d5c5-fe61-4e6e-99f3-419868464bae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.032370] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff32032-109c-4eeb-b3d7-3435f0d51ebf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.035058] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781842, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.042465] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f835e348-3123-4cac-9cb5-7ac393b389ef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.046840] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 625.046840] env[68233]: value = "task-2781843" [ 625.046840] env[68233]: _type = "Task" [ 625.046840] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.058220] env[68233]: DEBUG nova.compute.provider_tree [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.065442] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781843, 'name': CloneVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.112768] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.112927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquired lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.113098] env[68233]: DEBUG nova.network.neutron [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.217440] env[68233]: DEBUG nova.network.neutron [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.264032] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a8606-f52d-7a6d-6114-c3c640fb2ef7, 'name': SearchDatastore_Task, 'duration_secs': 0.011147} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.264032] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0a6c8e0-65e0-4830-a7b0-2a204c7ff9c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.268741] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 625.268741] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52542105-c045-0fd5-904d-a888a8d6e4e7" [ 625.268741] env[68233]: _type = "Task" [ 625.268741] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.283742] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52542105-c045-0fd5-904d-a888a8d6e4e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.501217] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781842, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.564935] env[68233]: DEBUG nova.scheduler.client.report [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 625.575318] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781843, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.680732] env[68233]: DEBUG nova.network.neutron [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.692844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.693037] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.721243] env[68233]: INFO nova.compute.manager [-] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Took 1.03 seconds to deallocate network for instance. [ 625.785026] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52542105-c045-0fd5-904d-a888a8d6e4e7, 'name': SearchDatastore_Task, 'duration_secs': 0.018028} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.785378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.785640] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a340c66c-74eb-43e5-8e72-54d9c8b07a26/a340c66c-74eb-43e5-8e72-54d9c8b07a26.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 625.785897] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc71a96a-46d0-4393-b240-3b6cc8325243 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.795078] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 625.795078] env[68233]: value = "task-2781844" [ 625.795078] env[68233]: _type = "Task" [ 625.795078] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.805719] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781844, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.923707] env[68233]: DEBUG nova.network.neutron [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Updating instance_info_cache with network_info: [{"id": "c00757db-7911-4503-932f-f300752512de", "address": "fa:16:3e:50:b0:f8", "network": {"id": "f90be3fc-e721-48ce-ba8a-9e23975df0dd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1958764680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cabc700bdda94bfe87c4a418aeb40e25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00757db-79", "ovs_interfaceid": "c00757db-7911-4503-932f-f300752512de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.005039] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781842, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.069693] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781843, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.079656] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.079845] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 626.087221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.574s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.087221] env[68233]: DEBUG nova.objects.instance [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lazy-loading 'resources' on Instance uuid 102187bd-0cb2-4496-8dd0-9101b24ee4fa {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 626.209625] env[68233]: DEBUG nova.compute.manager [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Received event network-changed-f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 626.210021] env[68233]: DEBUG nova.compute.manager [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Refreshing instance network info cache due to event network-changed-f1ca2437-5a7d-4e37-9f83-6cbec685618e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 626.210429] env[68233]: DEBUG oslo_concurrency.lockutils [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] Acquiring lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.210768] env[68233]: DEBUG oslo_concurrency.lockutils [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] Acquired lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.211055] env[68233]: DEBUG nova.network.neutron [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Refreshing network info cache for port f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 626.228614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.311174] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781844, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51146} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.311174] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a340c66c-74eb-43e5-8e72-54d9c8b07a26/a340c66c-74eb-43e5-8e72-54d9c8b07a26.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 626.311174] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 626.311174] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-42df4b96-dfa7-4710-a321-55453f436a76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.321716] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 626.321716] env[68233]: value = "task-2781845" [ 626.321716] env[68233]: _type = "Task" [ 626.321716] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.336238] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781845, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.427862] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Releasing lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.428363] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Instance network_info: |[{"id": "c00757db-7911-4503-932f-f300752512de", "address": "fa:16:3e:50:b0:f8", "network": {"id": "f90be3fc-e721-48ce-ba8a-9e23975df0dd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1958764680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cabc700bdda94bfe87c4a418aeb40e25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00757db-79", "ovs_interfaceid": "c00757db-7911-4503-932f-f300752512de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 626.428678] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:b0:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '023d6500-887e-4dc4-bec5-06b40450d9c0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c00757db-7911-4503-932f-f300752512de', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.444091] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Creating folder: Project (cabc700bdda94bfe87c4a418aeb40e25). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.444091] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28895564-16c3-49dc-905d-ee28b314b52d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.459126] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Created folder: Project (cabc700bdda94bfe87c4a418aeb40e25) in parent group-v559223. [ 626.459126] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Creating folder: Instances. Parent ref: group-v559275. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.459126] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcdf4d1f-cad9-4e79-839f-b9628c3a20af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.474566] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Created folder: Instances in parent group-v559275. [ 626.474566] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 626.474566] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.474566] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34b24563-89c2-4ec7-87aa-a102f36b4c56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.496947] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.496947] env[68233]: value = "task-2781848" [ 626.496947] env[68233]: _type = "Task" [ 626.496947] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.500580] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781842, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.515036] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781848, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.562788] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781843, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.586453] env[68233]: DEBUG nova.compute.utils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 626.587959] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 626.588560] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 626.663952] env[68233]: DEBUG nova.policy [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '839b33e7aa11482882403ddc2319583f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '853a057cfba3400ba05c89cb1d292f61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 626.845369] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781845, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07505} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.848453] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.852506] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d4c2b7-c23e-4039-a1dd-6fd01af363a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.885057] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] a340c66c-74eb-43e5-8e72-54d9c8b07a26/a340c66c-74eb-43e5-8e72-54d9c8b07a26.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.888693] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b1f4873-05d0-4d0b-8194-b958d0092657 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.920023] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 626.920023] env[68233]: value = "task-2781849" [ 626.920023] env[68233]: _type = "Task" [ 626.920023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.936864] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781849, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.007323] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781842, 'name': ReconfigVM_Task, 'duration_secs': 1.709786} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.008344] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 11ec9800-fa7e-4dbd-bdc1-63d0b496589f/11ec9800-fa7e-4dbd-bdc1-63d0b496589f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.008639] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-430898db-79b7-40dd-a7d7-3d54d715f74f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.018273] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781848, 'name': CreateVM_Task, 'duration_secs': 0.475851} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.021660] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.024249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.024249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.024249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.024249] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceeee4a2-2832-4c9e-93bf-7bc8d1b0c45e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.027699] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 627.027699] env[68233]: value = "task-2781850" [ 627.027699] env[68233]: _type = "Task" [ 627.027699] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.029115] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 627.029115] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cc2ffd-1a98-ee31-ad4a-89587c2a9d30" [ 627.029115] env[68233]: _type = "Task" [ 627.029115] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.054445] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781850, 'name': Rename_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.064047] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cc2ffd-1a98-ee31-ad4a-89587c2a9d30, 'name': SearchDatastore_Task, 'duration_secs': 0.011256} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.067074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.067357] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.067629] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.068473] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.068674] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 627.069833] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-ca26e7a5-1431-4566-8974-d5f27403a758 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.077301] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781843, 'name': CloneVM_Task, 'duration_secs': 1.841165} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.077301] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Created linked-clone VM from snapshot [ 627.077301] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f89096-5163-4ec2-8658-49e74405d002 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.082236] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 627.082236] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 627.083961] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91953d48-1cf4-4258-aa45-4cf37eafff4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.092903] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Uploading image cd8ef9a9-6561-4507-b734-8e61fe2038c9 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 627.098328] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 627.106359] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 627.106359] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524bf3f7-702a-fc63-c5fc-fb850ff0f9a9" [ 627.106359] env[68233]: _type = "Task" [ 627.106359] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.114949] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 627.114949] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-98a37d52-fb21-47e4-94b4-271347168bb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.129017] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524bf3f7-702a-fc63-c5fc-fb850ff0f9a9, 'name': SearchDatastore_Task, 'duration_secs': 0.011311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.131756] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 627.131756] env[68233]: value = "task-2781851" [ 627.131756] env[68233]: _type = "Task" [ 627.131756] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.132295] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a8970d3-bc63-4fa7-b744-81d2605e49d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.150024] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 627.150024] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d34f4a-853b-bc74-36d9-a89760dffc26" [ 627.150024] env[68233]: _type = "Task" [ 627.150024] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.153487] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781851, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.166165] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d34f4a-853b-bc74-36d9-a89760dffc26, 'name': SearchDatastore_Task, 'duration_secs': 0.0142} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.166695] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.167781] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d19421ad-88d5-4479-a6e4-c6d59e863b31/d19421ad-88d5-4479-a6e4-c6d59e863b31.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.167781] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea5d73cb-18ff-4f42-a4a3-5081d03c9fca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.179355] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 627.179355] env[68233]: value = "task-2781852" [ 627.179355] env[68233]: _type = "Task" [ 627.179355] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.191849] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781852, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.232775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d8d6c9-cf2f-45d7-a9c9-1cd5a6d25147 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.245089] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d350acff-0be5-489a-84ec-3da87b2c7de5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.289995] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675ad2cb-d0ae-4da7-8feb-d0724d7eba69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.298768] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1f1853-dad1-4bf5-a363-8434f705b1c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.318676] env[68233]: DEBUG nova.compute.provider_tree [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 627.403714] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Successfully created port: 2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.436364] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781849, 'name': ReconfigVM_Task, 'duration_secs': 0.354454} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.437291] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Reconfigured VM instance instance-0000000f to attach disk [datastore2] a340c66c-74eb-43e5-8e72-54d9c8b07a26/a340c66c-74eb-43e5-8e72-54d9c8b07a26.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 627.437981] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f0e58cf-911b-4b14-bb42-983ce9ec089e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.446453] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 627.446453] env[68233]: value = "task-2781853" [ 627.446453] env[68233]: _type = "Task" [ 627.446453] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.457454] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781853, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.539664] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781850, 'name': Rename_Task, 'duration_secs': 0.256018} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.539952] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.540212] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8caa04d-5b32-4680-a24c-fe4acb0b9316 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.549547] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 627.549547] env[68233]: value = "task-2781854" [ 627.549547] env[68233]: _type = "Task" [ 627.549547] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.559017] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.650792] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781851, 'name': Destroy_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.694221] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781852, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.733999] env[68233]: DEBUG nova.network.neutron [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Updated VIF entry in instance network info cache for port f1ca2437-5a7d-4e37-9f83-6cbec685618e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 627.734408] env[68233]: DEBUG nova.network.neutron [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Updating instance_info_cache with network_info: [{"id": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "address": "fa:16:3e:d2:21:20", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf1ca2437-5a", "ovs_interfaceid": "f1ca2437-5a7d-4e37-9f83-6cbec685618e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.826685] env[68233]: DEBUG nova.scheduler.client.report [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 627.962217] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781853, 'name': Rename_Task, 'duration_secs': 0.494512} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.963327] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.963782] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-692ee029-e44f-4daa-bdc2-5e93f0177c8f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.976654] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 627.976654] env[68233]: value = "task-2781855" [ 627.976654] env[68233]: _type = "Task" [ 627.976654] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.992901] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781855, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.069123] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781854, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.116946] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 628.148637] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781851, 'name': Destroy_Task, 'duration_secs': 0.8953} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.150771] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Destroyed the VM [ 628.151111] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 628.154031] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ac1045b8-91ad-4924-b5e5-a0de0d733e17 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.158179] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 628.158579] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.158579] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 628.158791] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.158923] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 628.159205] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 628.159570] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 628.159661] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 628.160062] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 628.160300] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 628.160300] env[68233]: DEBUG nova.virt.hardware [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 628.161272] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c414b5-1ed6-4494-bcb3-7950e3652aa2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.166844] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 628.166844] env[68233]: value = "task-2781856" [ 628.166844] env[68233]: _type = "Task" [ 628.166844] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.179024] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7daa36-b97b-47ef-bea4-cb13feb74154 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.185523] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781856, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.210117] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789089} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.210671] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d19421ad-88d5-4479-a6e4-c6d59e863b31/d19421ad-88d5-4479-a6e4-c6d59e863b31.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 628.210671] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 628.210986] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9896c7a6-d9a5-41ed-a562-2cd8757277d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.222465] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 628.222465] env[68233]: value = "task-2781857" [ 628.222465] env[68233]: _type = "Task" [ 628.222465] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.235350] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781857, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.242298] env[68233]: DEBUG oslo_concurrency.lockutils [req-c13885fb-1b5b-43dc-b001-a8ccc5cc17cb req-d515d175-2d16-4a73-90dd-42eda2fdcd5c service nova] Releasing lock "refresh_cache-a340c66c-74eb-43e5-8e72-54d9c8b07a26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.333757] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.248s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.337483] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.413s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.339088] env[68233]: INFO nova.compute.claims [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.376434] env[68233]: INFO nova.scheduler.client.report [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Deleted allocations for instance 102187bd-0cb2-4496-8dd0-9101b24ee4fa [ 628.491821] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781855, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.573455] env[68233]: DEBUG oslo_vmware.api [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2781854, 'name': PowerOnVM_Task, 'duration_secs': 0.767426} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.573901] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.574000] env[68233]: INFO nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Took 11.50 seconds to spawn the instance on the hypervisor. 
[ 628.574219] env[68233]: DEBUG nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 628.574935] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ef4999-c286-45c9-89d2-3a58f13dc157 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.679862] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781856, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.732225] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781857, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076653} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.732225] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.732961] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da495a61-ef7c-4010-85d5-fe286b09209e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.757369] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] d19421ad-88d5-4479-a6e4-c6d59e863b31/d19421ad-88d5-4479-a6e4-c6d59e863b31.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.758089] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9bb8f92-a0bc-4fa2-823f-27fc4c84ba60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.784861] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 628.784861] env[68233]: value = "task-2781858" [ 628.784861] env[68233]: _type = "Task" [ 628.784861] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.794486] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781858, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.886045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-066f2296-4049-4048-8e45-02bae73db969 tempest-DeleteServersAdminTestJSON-85028010 tempest-DeleteServersAdminTestJSON-85028010-project-admin] Lock "102187bd-0cb2-4496-8dd0-9101b24ee4fa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.820s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.993619] env[68233]: DEBUG oslo_vmware.api [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781855, 'name': PowerOnVM_Task, 'duration_secs': 0.570077} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.994069] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.994351] env[68233]: INFO nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Took 9.22 seconds to spawn the instance on the hypervisor. [ 628.994543] env[68233]: DEBUG nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 628.995375] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-001ffc02-c0a9-4633-afa7-db5a3021fbed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.102422] env[68233]: INFO nova.compute.manager [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Took 31.44 seconds to build instance. [ 629.180625] env[68233]: DEBUG oslo_vmware.api [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781856, 'name': RemoveSnapshot_Task, 'duration_secs': 0.862561} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.180975] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 629.298711] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781858, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.425041] env[68233]: DEBUG nova.compute.manager [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Received event network-changed-cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 629.425252] env[68233]: DEBUG nova.compute.manager [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Refreshing instance network info cache due to event network-changed-cc05db07-a36a-494d-92b6-af58fdd9d143. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 629.425462] env[68233]: DEBUG oslo_concurrency.lockutils [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.425717] env[68233]: DEBUG oslo_concurrency.lockutils [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.425856] env[68233]: DEBUG nova.network.neutron [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Refreshing network info cache for port cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.448580] env[68233]: DEBUG nova.compute.manager [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Received event network-vif-plugged-c00757db-7911-4503-932f-f300752512de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 629.448580] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Acquiring lock "d19421ad-88d5-4479-a6e4-c6d59e863b31-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.448580] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Lock 
"d19421ad-88d5-4479-a6e4-c6d59e863b31-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.448580] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.448580] env[68233]: DEBUG nova.compute.manager [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] No waiting events found dispatching network-vif-plugged-c00757db-7911-4503-932f-f300752512de {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 629.448847] env[68233]: WARNING nova.compute.manager [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Received unexpected event network-vif-plugged-c00757db-7911-4503-932f-f300752512de for instance with vm_state building and task_state spawning. [ 629.449090] env[68233]: DEBUG nova.compute.manager [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Received event network-changed-c00757db-7911-4503-932f-f300752512de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 629.449220] env[68233]: DEBUG nova.compute.manager [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Refreshing instance network info cache due to event network-changed-c00757db-7911-4503-932f-f300752512de. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 629.449424] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Acquiring lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.449560] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Acquired lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.449713] env[68233]: DEBUG nova.network.neutron [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Refreshing network info cache for port c00757db-7911-4503-932f-f300752512de {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.458034] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "88d67405-b8c6-484a-b178-68a8babb3708" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.458259] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.520677] env[68233]: INFO nova.compute.manager [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Took 31.44 seconds to build instance. 
[ 629.592155] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Successfully updated port: 2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 629.608023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-76eddc9b-18f7-4c43-a71f-9be64aee2479 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.960s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.686606] env[68233]: WARNING nova.compute.manager [None req-fa3dd85d-9c3f-4ecc-98d8-ed051c8ce1f9 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Image not found during snapshot: nova.exception.ImageNotFound: Image cd8ef9a9-6561-4507-b734-8e61fe2038c9 could not be found. [ 629.802843] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781858, 'name': ReconfigVM_Task, 'duration_secs': 0.835075} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.803784] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Reconfigured VM instance instance-00000010 to attach disk [datastore2] d19421ad-88d5-4479-a6e4-c6d59e863b31/d19421ad-88d5-4479-a6e4-c6d59e863b31.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.804071] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-20213ff2-c5c9-4aba-b5c6-2347ce41f5a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.813620] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 629.813620] env[68233]: value = "task-2781859" [ 629.813620] env[68233]: _type = "Task" [ 629.813620] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.819153] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb32b7fa-7dd3-4fd3-9c38-156e58fd16ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.834918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "636b6b36-3ab5-4851-a232-d27b54895595" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.835174] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "636b6b36-3ab5-4851-a232-d27b54895595" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.835646] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781859, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.836699] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c227dcd-1ce8-43fa-8dc7-ca777cfb1ec8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.875050] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b9ab4d-f19a-49d3-a290-0ef0b7b3d86a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.884857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a165cb98-f986-4be9-a4c0-7a3aaf36e78d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.901700] env[68233]: DEBUG nova.compute.provider_tree [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.022868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-faf13183-a8c4-4c6c-ab41-4c99f55310b3 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.955s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.096336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 
tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 630.096336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 630.096336] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 630.116858] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.245734] env[68233]: DEBUG nova.network.neutron [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updated VIF entry in instance network info cache for port cc05db07-a36a-494d-92b6-af58fdd9d143. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.246061] env[68233]: DEBUG nova.network.neutron [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.325071] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781859, 'name': Rename_Task, 'duration_secs': 0.215625} completed 
successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.325369] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 630.325609] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b13561d-5f53-48ff-8867-049de1656ef2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.336433] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 630.336433] env[68233]: value = "task-2781860" [ 630.336433] env[68233]: _type = "Task" [ 630.336433] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.348281] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781860, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.405433] env[68233]: DEBUG nova.scheduler.client.report [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 630.421939] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "3c9b701e-6461-45e3-8654-3291c5a487b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.422207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.490364] env[68233]: DEBUG nova.network.neutron [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Updated VIF entry in instance network info cache for port 
c00757db-7911-4503-932f-f300752512de. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.490776] env[68233]: DEBUG nova.network.neutron [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Updating instance_info_cache with network_info: [{"id": "c00757db-7911-4503-932f-f300752512de", "address": "fa:16:3e:50:b0:f8", "network": {"id": "f90be3fc-e721-48ce-ba8a-9e23975df0dd", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1958764680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cabc700bdda94bfe87c4a418aeb40e25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "023d6500-887e-4dc4-bec5-06b40450d9c0", "external-id": "nsx-vlan-transportzone-108", "segmentation_id": 108, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc00757db-79", "ovs_interfaceid": "c00757db-7911-4503-932f-f300752512de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.525717] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 630.648037] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.677767] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.743886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.744077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.752768] env[68233]: DEBUG oslo_concurrency.lockutils [req-23865c22-6bd8-4b82-9676-c58f0b23dcfb req-a9f67e1e-dd3b-4f6d-8275-e6c9bfb1627d service nova] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.852192] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781860, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.911195] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.912092] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 630.915181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 22.702s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.975315] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.975569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 630.995568] env[68233]: DEBUG oslo_concurrency.lockutils [req-8adadbe6-9996-4294-8ff2-7ceea7f80b22 req-e3c5f9e2-2d36-4fc5-aef9-d49e6ed1d509 service nova] Releasing lock "refresh_cache-d19421ad-88d5-4479-a6e4-c6d59e863b31" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.046955] env[68233]: DEBUG nova.network.neutron [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Updating instance_info_cache with network_info: [{"id": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "address": "fa:16:3e:6e:ab:bf", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e7be8f9-f2", "ovs_interfaceid": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.065182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.355651] env[68233]: DEBUG oslo_vmware.api [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781860, 'name': PowerOnVM_Task, 'duration_secs': 0.874396} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.355975] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 631.356140] env[68233]: INFO nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Took 7.97 seconds to spawn the instance on the hypervisor. [ 631.356312] env[68233]: DEBUG nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 631.357208] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89572da1-2017-4712-993f-d286fa25e5f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.422855] env[68233]: INFO nova.compute.claims [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.426445] env[68233]: DEBUG nova.compute.utils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 631.430022] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 631.430022] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 631.537890] env[68233]: DEBUG nova.policy [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '236b431e2f194bb2a4bc0090158d54b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e90f2c4afe61469fa4a081e470058fc7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 631.550318] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.550630] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Instance network_info: |[{"id": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "address": "fa:16:3e:6e:ab:bf", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e7be8f9-f2", "ovs_interfaceid": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 631.553201] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:ab:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '2e7be8f9-f275-4c54-ab25-eaa64558351c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 631.561981] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating folder: Project (853a057cfba3400ba05c89cb1d292f61). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.562853] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-54c7c7db-548d-47b9-b4a4-99bbcf3a86ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.577046] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created folder: Project (853a057cfba3400ba05c89cb1d292f61) in parent group-v559223. [ 631.577267] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating folder: Instances. Parent ref: group-v559278. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 631.577503] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3528acb9-3692-442f-9c46-d9fa66649b88 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.602874] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created folder: Instances in parent group-v559278. [ 631.604485] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 631.604485] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 631.604485] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e64bd920-13f7-45f5-ae7c-8d7a1e34b790 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.627206] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 631.627206] env[68233]: value = "task-2781863" [ 631.627206] env[68233]: _type = "Task" [ 631.627206] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.640786] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781863, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.880016] env[68233]: INFO nova.compute.manager [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Took 30.47 seconds to build instance. [ 631.930991] env[68233]: INFO nova.compute.resource_tracker [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating resource usage from migration f9c4180d-3134-46ba-8082-85301d976f9c [ 631.937019] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 632.066018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "38c86c2b-9b2b-482e-b26d-066208467202" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.066018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.066240] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "38c86c2b-9b2b-482e-b26d-066208467202-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.066569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.066887] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.070219] env[68233]: INFO nova.compute.manager [None 
req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Terminating instance [ 632.137027] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781863, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.327997] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b22dee-a10e-4401-bfb1-18f196f71565 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.336232] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec39413-6b1f-42f8-9b27-671ac00d28c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.375802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab78c317-f8cc-46a6-a060-ad3511d31250 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.385020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-12864938-39bb-48bf-be6e-2d274d56aabc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.987s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.386514] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b1bf0f-5b21-48ee-9cf3-5165f7a9369f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.403756] env[68233]: DEBUG nova.compute.provider_tree [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.496943] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Successfully created port: dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.525125] env[68233]: DEBUG nova.compute.manager [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Received event network-vif-plugged-2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 632.525357] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Acquiring lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
632.525562] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.525733] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.525878] env[68233]: DEBUG nova.compute.manager [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] No waiting events found dispatching network-vif-plugged-2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 632.526379] env[68233]: WARNING nova.compute.manager [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Received unexpected event network-vif-plugged-2e7be8f9-f275-4c54-ab25-eaa64558351c for instance with vm_state building and task_state spawning. [ 632.526643] env[68233]: DEBUG nova.compute.manager [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Received event network-changed-2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 632.527164] env[68233]: DEBUG nova.compute.manager [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Refreshing instance network info cache due to event network-changed-2e7be8f9-f275-4c54-ab25-eaa64558351c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 632.528225] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Acquiring lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.528818] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Acquired lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.529254] env[68233]: DEBUG nova.network.neutron [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Refreshing network info cache for port 2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 632.575125] env[68233]: DEBUG nova.compute.manager [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.575125] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.575125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a901f0b4-830e-4311-b8d0-da285791cf62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.582624] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.582897] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e11d1a3-2a7a-4711-acae-1877d8280904 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.591539] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 632.591539] env[68233]: value = "task-2781864" [ 632.591539] env[68233]: _type = "Task" [ 632.591539] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.603449] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781864, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.638441] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781863, 'name': CreateVM_Task, 'duration_secs': 0.721901} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.638667] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 632.639449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.639611] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.639922] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 632.640188] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-633036f9-059b-43e3-b53f-16c107ae5b76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.646315] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 632.646315] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e68e91-ebe5-27ed-1af6-4753c1b29eda" [ 632.646315] env[68233]: _type = "Task" [ 632.646315] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.655541] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e68e91-ebe5-27ed-1af6-4753c1b29eda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.893057] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 632.915092] env[68233]: DEBUG nova.scheduler.client.report [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.945347] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 632.986626] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 632.986948] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 632.987496] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 632.990015] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 632.990015] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
632.990015] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 632.990015] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 632.990015] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 632.990502] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 632.990502] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 632.990502] env[68233]: DEBUG nova.virt.hardware [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 632.992252] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917a29ff-a791-431c-9d15-8995102eb53d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.005936] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f918608-456f-47a4-afac-ea5586087632 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.113049] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.113896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.117015] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781864, 'name': PowerOffVM_Task, 'duration_secs': 0.207049} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.117400] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4289c980-b115-42c0-bc9a-4e2aac873b49 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "bee2e1c1-5803-419e-9606-24b1d1abcd52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.117514] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4289c980-b115-42c0-bc9a-4e2aac873b49 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "bee2e1c1-5803-419e-9606-24b1d1abcd52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.117765] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.118266] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.118703] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e01bd91-9387-4012-89ab-669034ddfb09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.161011] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e68e91-ebe5-27ed-1af6-4753c1b29eda, 'name': SearchDatastore_Task, 'duration_secs': 0.011733} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.164053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.164646] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 633.164924] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.165507] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.165507] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 633.165914] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79c70ddf-0671-45e4-b405-86829c9dd7b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.175582] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 633.175768] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 633.176520] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91c0d25e-bfde-428b-b777-a023dcb65546 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.183462] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 633.183462] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52456294-4058-a1e4-5fa3-31ccc61da03e" [ 633.183462] env[68233]: _type = "Task" [ 633.183462] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.196160] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52456294-4058-a1e4-5fa3-31ccc61da03e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.198395] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.198629] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.198866] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleting the datastore file [datastore2] 38c86c2b-9b2b-482e-b26d-066208467202 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.199131] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48b46018-f13c-4143-b243-904c112eb46d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.207646] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 633.207646] env[68233]: value = "task-2781866" [ 633.207646] env[68233]: _type = "Task" [ 633.207646] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.218216] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.332673] env[68233]: DEBUG nova.network.neutron [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Updated VIF entry in instance network info cache for port 2e7be8f9-f275-4c54-ab25-eaa64558351c. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 633.333015] env[68233]: DEBUG nova.network.neutron [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Updating instance_info_cache with network_info: [{"id": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "address": "fa:16:3e:6e:ab:bf", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2e7be8f9-f2", "ovs_interfaceid": "2e7be8f9-f275-4c54-ab25-eaa64558351c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.418164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.503s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.418431] env[68233]: INFO nova.compute.manager [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Migrating [ 633.420108] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.420108] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.422639] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 
tempest-ServerExternalEventsTest-1611400731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.778s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.424597] env[68233]: INFO nova.compute.claims [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.432996] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.698372] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52456294-4058-a1e4-5fa3-31ccc61da03e, 'name': SearchDatastore_Task, 'duration_secs': 0.015542} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.699345] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac02384f-3002-4176-ab31-48048294aae8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.706220] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 633.706220] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a39606-9e78-23bd-1c86-95e5f9b7ad19" [ 633.706220] env[68233]: _type = "Task" [ 633.706220] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.724366] env[68233]: DEBUG oslo_vmware.api [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2781866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211831} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.726505] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a39606-9e78-23bd-1c86-95e5f9b7ad19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.726505] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.726505] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.726505] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.726505] env[68233]: INFO nova.compute.manager [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Took 1.15 seconds to destroy the instance on the hypervisor. [ 633.726717] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.726717] env[68233]: DEBUG nova.compute.manager [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 633.726717] env[68233]: DEBUG nova.network.neutron [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.838795] env[68233]: DEBUG oslo_concurrency.lockutils [req-49fc0d46-3249-4b82-b204-edc2bf79eca9 req-e4daa2b3-1ad0-4d62-90a2-d107ebd7d283 service nova] Releasing lock "refresh_cache-19a1441d-9621-4e6e-ac38-8ad08206facf" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.930750] env[68233]: INFO nova.compute.rpcapi [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 633.930750] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.219688] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a39606-9e78-23bd-1c86-95e5f9b7ad19, 'name': SearchDatastore_Task, 'duration_secs': 0.023487} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.220033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.220370] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 19a1441d-9621-4e6e-ac38-8ad08206facf/19a1441d-9621-4e6e-ac38-8ad08206facf.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 634.221720] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb665810-742b-4961-b5b3-232b451d8510 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.233227] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 634.233227] env[68233]: value = "task-2781867" [ 634.233227] env[68233]: _type = "Task" [ 634.233227] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.247660] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.468244] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Successfully updated port: dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 634.471999] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.476026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.476026] env[68233]: DEBUG nova.network.neutron [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.559896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.560299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.560657] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "d19421ad-88d5-4479-a6e4-c6d59e863b31-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.560737] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock 
"d19421ad-88d5-4479-a6e4-c6d59e863b31-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.560914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.563585] env[68233]: INFO nova.compute.manager [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Terminating instance [ 634.577208] env[68233]: DEBUG nova.network.neutron [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.749855] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781867, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.753297] env[68233]: DEBUG nova.compute.manager [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Received event network-vif-plugged-dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 634.753573] env[68233]: DEBUG oslo_concurrency.lockutils [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] Acquiring lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 634.753815] env[68233]: DEBUG oslo_concurrency.lockutils [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 634.754034] env[68233]: DEBUG oslo_concurrency.lockutils [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.754266] env[68233]: DEBUG nova.compute.manager [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] No waiting events found dispatching network-vif-plugged-dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
634.755014] env[68233]: WARNING nova.compute.manager [req-9097c1dd-7814-454b-8549-693479050c13 req-34dc0dad-235b-4a9b-9286-042326694327 service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Received unexpected event network-vif-plugged-dfc4fcf9-323d-437d-a092-4763f3b7d0e5 for instance with vm_state building and task_state spawning. [ 634.971908] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e9e53f-a508-43f7-8493-3eb44f49d2f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.977212] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.977513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.977582] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.992164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f211fc8-5541-4f1e-b4bc-3bec302f08fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.033453] env[68233]: DEBUG nova.compute.manager [req-853a83e5-e18e-43e0-86a8-d8a4e51f9998 req-c87e7636-5b4c-46b6-b107-b4aa71ebd1aa service nova] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Received event network-vif-deleted-486238a1-39d6-463c-8bb2-8fd9577c7798 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 635.034919] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7836f37-815c-4bf9-b386-be44d3253750 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.044201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a19790-1278-4e07-b663-74899cffc7f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.066963] env[68233]: DEBUG nova.compute.provider_tree [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 635.072484] env[68233]: DEBUG nova.compute.manager [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: 
d19421ad-88d5-4479-a6e4-c6d59e863b31] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 635.072692] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 635.075895] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b2ded4-07e4-458c-9ea9-5ba87d8f3472 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.078712] env[68233]: INFO nova.compute.manager [-] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Took 1.35 seconds to deallocate network for instance. [ 635.088861] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 635.089122] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d500b2e-8ac8-4742-95e7-cfb86a6a76ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.097326] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 635.097326] env[68233]: value = "task-2781868" [ 635.097326] env[68233]: _type = "Task" [ 635.097326] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.106437] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781868, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.250701] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877809} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.250701] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 19a1441d-9621-4e6e-ac38-8ad08206facf/19a1441d-9621-4e6e-ac38-8ad08206facf.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 635.250701] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 635.250701] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24aabe9e-ac0f-464b-8064-1719e2cad025 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.267222] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 635.267222] env[68233]: value = "task-2781869" [ 635.267222] env[68233]: _type = "Task" [ 635.267222] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.278515] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781869, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.308153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.308417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.395437] env[68233]: DEBUG nova.network.neutron [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.550105] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 635.570680] env[68233]: DEBUG nova.scheduler.client.report [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 635.590658] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.607935] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781868, 'name': PowerOffVM_Task, 'duration_secs': 0.332737} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.609458] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 635.611918] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 635.611918] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a621cff6-8ce8-496f-870c-76802a71a929 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.718480] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 635.718480] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 635.718480] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 
tempest-ImagesNegativeTestJSON-116409968-project-member] Deleting the datastore file [datastore2] d19421ad-88d5-4479-a6e4-c6d59e863b31 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 635.718480] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d29e4d19-cf61-4eef-8583-96f9dd1da794 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.729287] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for the task: (returnval){ [ 635.729287] env[68233]: value = "task-2781871" [ 635.729287] env[68233]: _type = "Task" [ 635.729287] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.740421] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.744870] env[68233]: DEBUG nova.network.neutron [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Updating instance_info_cache with network_info: [{"id": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "address": "fa:16:3e:97:a4:7d", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfc4fcf9-32", "ovs_interfaceid": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.782113] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781869, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149012} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.782113] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 635.783271] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfbd53b-7869-46e6-b485-4e40e6de7022 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.812022] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] 19a1441d-9621-4e6e-ac38-8ad08206facf/19a1441d-9621-4e6e-ac38-8ad08206facf.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 635.812778] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a71f93d7-6b10-4951-a1f5-4e848d545f53 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.836529] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 635.836529] env[68233]: value = "task-2781872" [ 635.836529] env[68233]: _type = "Task" [ 635.836529] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.847826] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781872, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.898957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.079865] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.659s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.080847] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 636.086094] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.380s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.087776] env[68233]: INFO nova.compute.claims [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.240693] env[68233]: DEBUG oslo_vmware.api [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Task: {'id': task-2781871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389823} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.241665] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 636.241928] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 636.242180] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 636.242546] env[68233]: INFO nova.compute.manager [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Took 1.17 seconds to destroy the instance on the hypervisor. [ 636.242744] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 636.242908] env[68233]: DEBUG nova.compute.manager [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 636.243042] env[68233]: DEBUG nova.network.neutron [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 636.251014] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.251014] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Instance network_info: |[{"id": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "address": "fa:16:3e:97:a4:7d", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfc4fcf9-32", "ovs_interfaceid": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 636.251246] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:a4:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dfc4fcf9-323d-437d-a092-4763f3b7d0e5', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 636.256599] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 636.257030] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 636.257400] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-997d55da-3d60-48dd-a165-5ca4ed560424 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.287099] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 636.287099] env[68233]: value = "task-2781873" [ 636.287099] env[68233]: _type = "Task" [ 636.287099] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.297197] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781873, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.346815] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.347203] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.359792] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781872, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.595423] env[68233]: DEBUG nova.compute.utils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 636.597277] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 636.597503] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.708221] env[68233]: DEBUG nova.policy [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ce1965d456d242a9a805eedf7fce9f36', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6e239df56cc40a0a4df5756613f31be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.796526] env[68233]: DEBUG nova.compute.manager [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Received event network-changed-dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 636.796788] env[68233]: DEBUG nova.compute.manager [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Refreshing instance network info cache due to event network-changed-dfc4fcf9-323d-437d-a092-4763f3b7d0e5. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 636.797025] env[68233]: DEBUG oslo_concurrency.lockutils [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] Acquiring lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.797193] env[68233]: DEBUG oslo_concurrency.lockutils [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] Acquired lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.797358] env[68233]: DEBUG nova.network.neutron [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Refreshing network info cache for port dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.807716] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781873, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.853830] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781872, 'name': ReconfigVM_Task, 'duration_secs': 0.606205} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.853830] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Reconfigured VM instance instance-00000011 to attach disk [datastore2] 19a1441d-9621-4e6e-ac38-8ad08206facf/19a1441d-9621-4e6e-ac38-8ad08206facf.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 636.853830] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1abffa70-7fb6-453b-b08d-adfcac528062 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.863998] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 636.863998] env[68233]: value = "task-2781874" [ 636.863998] env[68233]: _type = "Task" [ 636.863998] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.876591] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781874, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.105230] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 637.186328] env[68233]: DEBUG nova.network.neutron [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.205127] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Successfully created port: 0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.302453] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781873, 'name': CreateVM_Task, 'duration_secs': 0.562829} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.303746] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 637.304493] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.304658] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.305224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 637.305941] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4051c76-5756-4e45-b00f-5893c1253508 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.313080] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 637.313080] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52653191-bc8a-8100-8df0-bbd2e38f032c" [ 637.313080] env[68233]: _type = "Task" [ 637.313080] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.323913] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52653191-bc8a-8100-8df0-bbd2e38f032c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.376665] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781874, 'name': Rename_Task, 'duration_secs': 0.164785} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.379559] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 637.380037] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-35007ff3-7c83-4968-8924-968d40fe2bec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.388647] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 637.388647] env[68233]: value = "task-2781875" [ 637.388647] env[68233]: _type = "Task" [ 637.388647] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.400746] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781875, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.415869] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb157ec-8258-46c3-bb12-49fe9a2a688f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.444459] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 637.690029] env[68233]: INFO nova.compute.manager [-] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Took 1.45 seconds to deallocate network for instance. [ 637.778042] env[68233]: DEBUG nova.network.neutron [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Updated VIF entry in instance network info cache for port dfc4fcf9-323d-437d-a092-4763f3b7d0e5. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.778454] env[68233]: DEBUG nova.network.neutron [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Updating instance_info_cache with network_info: [{"id": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "address": "fa:16:3e:97:a4:7d", "network": {"id": "9a0919c0-3110-4f87-aa64-3a2632e6c0cf", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1132455188-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e90f2c4afe61469fa4a081e470058fc7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdfc4fcf9-32", "ovs_interfaceid": "dfc4fcf9-323d-437d-a092-4763f3b7d0e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.807435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b17407e-ca71-40a8-a56e-c47dfbd54b8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.819562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986a09c2-277d-4d82-a7de-4fec5714dc04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.834198] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52653191-bc8a-8100-8df0-bbd2e38f032c, 'name': SearchDatastore_Task, 'duration_secs': 0.021767} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.869882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.870496] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.871542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.871873] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.872357] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.873555] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3760b681-13ac-4e72-a305-36198a4f3243 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.876562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbde9ca-8a2d-4b0f-ae40-fdfa735253f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.888254] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb654406-b198-4f7c-82c8-bd4b58b75e10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.894630] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.894630] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Folder 
[datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 637.898260] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2adde03-1e0c-401e-aec2-6eebe431577e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.911094] env[68233]: DEBUG nova.compute.provider_tree [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.917685] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781875, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.917987] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 637.917987] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283ea2a-0a4e-9792-4eca-a00756d1e522" [ 637.917987] env[68233]: _type = "Task" [ 637.917987] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.928557] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283ea2a-0a4e-9792-4eca-a00756d1e522, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.953209] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 637.953209] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-896f5b75-e3da-44f6-af07-aa25e9f8a9b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.962032] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 637.962032] env[68233]: value = "task-2781876" [ 637.962032] env[68233]: _type = "Task" [ 637.962032] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.973369] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.025664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "a5468df9-c54d-4014-8002-ef82f111a7a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.025952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.122682] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 638.152108] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.152409] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.153095] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.153095] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.153095] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 
tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.153095] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.153312] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.153504] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 638.153673] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.153834] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.153998] env[68233]: DEBUG nova.virt.hardware [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.157060] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba5a1a5-44b5-46ff-86a0-5205c21c6afa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.165606] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ae66ea-d58a-4303-bd46-1305ffd26966 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.197404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.284284] env[68233]: DEBUG oslo_concurrency.lockutils [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] Releasing lock "refresh_cache-90d88fcb-6141-499c-b049-ddfc9e210d5c" 
{{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.285169] env[68233]: DEBUG nova.compute.manager [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Received event network-vif-deleted-c00757db-7911-4503-932f-f300752512de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 638.285169] env[68233]: INFO nova.compute.manager [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Neutron deleted interface c00757db-7911-4503-932f-f300752512de; detaching it from the instance and deleting it from the info cache [ 638.285169] env[68233]: DEBUG nova.network.neutron [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.404158] env[68233]: DEBUG oslo_vmware.api [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781875, 'name': PowerOnVM_Task, 'duration_secs': 0.574471} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.404386] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 638.404603] env[68233]: INFO nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Took 10.29 seconds to spawn the instance on the hypervisor. 
[ 638.405051] env[68233]: DEBUG nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 638.405637] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa68e11d-d6e6-4667-ae7c-34d5af712f81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.419330] env[68233]: DEBUG nova.scheduler.client.report [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 638.432408] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5283ea2a-0a4e-9792-4eca-a00756d1e522, 'name': SearchDatastore_Task, 'duration_secs': 0.02229} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.433201] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b29be55d-64e4-471f-b5c2-92fa55f74961 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.440789] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 638.440789] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b2f9a9-a848-37ea-54ab-7925f9ffe162" [ 638.440789] env[68233]: _type = "Task" [ 638.440789] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.454270] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b2f9a9-a848-37ea-54ab-7925f9ffe162, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.472038] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781876, 'name': PowerOffVM_Task, 'duration_secs': 0.283705} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.472321] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.472504] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 638.713455] env[68233]: DEBUG nova.compute.manager [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Received event network-vif-plugged-0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 638.713455] env[68233]: DEBUG oslo_concurrency.lockutils [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] Acquiring lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.713455] env[68233]: DEBUG oslo_concurrency.lockutils [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.713455] env[68233]: DEBUG oslo_concurrency.lockutils [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.713455] env[68233]: DEBUG nova.compute.manager [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] No waiting events found dispatching network-vif-plugged-0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 638.713831] env[68233]: WARNING nova.compute.manager [req-4b909aaa-f871-4e80-81ca-7fe30905f30a req-7ec71701-037c-49bc-92c3-dbd5531b8131 service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Received unexpected event network-vif-plugged-0f4930ca-0156-459b-b9a9-dcc50b3efb21 for instance with vm_state building and task_state spawning. 
[ 638.783974] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Successfully updated port: 0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.795777] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b2f5185f-4d22-4638-aa15-b1668de0fd9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.804851] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27738310-1bb5-481b-868a-ca294c8f21f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.839020] env[68233]: DEBUG nova.compute.manager [req-731002a8-a352-4b56-ae92-b91f72def176 req-42fdabad-77de-4e2e-a37a-82aa5571b2ed service nova] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Detach interface failed, port_id=c00757db-7911-4503-932f-f300752512de, reason: Instance d19421ad-88d5-4479-a6e4-c6d59e863b31 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 638.924711] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.838s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.924711] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 638.933881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.628s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.934180] env[68233]: DEBUG nova.objects.instance [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lazy-loading 'resources' on Instance uuid 34889575-95ea-451c-aa59-49a5f30d4e4c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 638.939348] env[68233]: INFO nova.compute.manager [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Took 33.91 seconds to build instance. 
[ 638.959604] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b2f9a9-a848-37ea-54ab-7925f9ffe162, 'name': SearchDatastore_Task, 'duration_secs': 0.016655} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.959604] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.959604] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 90d88fcb-6141-499c-b049-ddfc9e210d5c/90d88fcb-6141-499c-b049-ddfc9e210d5c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 638.959604] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a977c06d-c1a3-44eb-b067-26acab469ef0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.970012] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 638.970012] env[68233]: value = "task-2781877" [ 638.970012] env[68233]: _type = "Task" [ 638.970012] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.979141] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 638.979464] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.979685] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 638.979929] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.980119] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 638.980326] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 638.980585] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 638.980762] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 638.980945] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 638.981109] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 638.981301] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 638.989946] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-878b12cd-6bee-4d69-b23a-7955171e64cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.001225] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781877, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.008656] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 639.008656] env[68233]: value = "task-2781878" [ 639.008656] env[68233]: _type = "Task" [ 639.008656] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.018413] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781878, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.287449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.287449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquired lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.287449] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 639.438572] env[68233]: DEBUG nova.compute.utils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 639.443201] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 639.443201] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 639.445749] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75921171-3510-46a9-9905-f916b14d2702 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.641s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.481383] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781877, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.486241] env[68233]: DEBUG nova.policy [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47750dd9b4cd4900a2e10596d267bbf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd53bb0dba91d48ccb92d5fa899086f66', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 639.522776] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781878, 'name': ReconfigVM_Task, 'duration_secs': 0.384505} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.523190] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 639.839839] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.881825] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Successfully created port: 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.945795] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 639.950322] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 639.989671] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.890871} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.989671] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 90d88fcb-6141-499c-b049-ddfc9e210d5c/90d88fcb-6141-499c-b049-ddfc9e210d5c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 639.991032] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 639.991118] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edef858c-9483-4978-ba97-9552469ee2c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.004880] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 640.004880] env[68233]: value = "task-2781879" [ 640.004880] env[68233]: _type = "Task" [ 640.004880] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.018154] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781879, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.030501] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.030959] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.030959] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.031273] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 640.031273] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.031426] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.031617] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.031793] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.032016] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.032661] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.032661] env[68233]: DEBUG nova.virt.hardware [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.038876] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfiguring VM instance instance-00000006 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 640.045538] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6357927-cec5-44ef-99eb-640ab2850b20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.069437] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 640.069437] env[68233]: value = "task-2781880" [ 640.069437] env[68233]: _type = "Task" [ 640.069437] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.081554] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781880, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.106788] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a714f31-d821-4f8e-8243-66d6da7a6b1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.114883] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f287d74-4f76-45fb-a347-f6cac62a98be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.121459] env[68233]: DEBUG nova.network.neutron [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Updating instance_info_cache with network_info: [{"id": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "address": "fa:16:3e:7c:ee:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4930ca-01", "ovs_interfaceid": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.160742] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9386156b-2c66-4f6e-a64a-af22a793b150 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.171644] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57c3c954-0d4b-4f63-ab57-ae6ee9c7c25a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.187356] env[68233]: DEBUG nova.compute.provider_tree [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.475120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.516610] env[68233]: DEBUG 
oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070067} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.516891] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 640.517702] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a617a8ee-1d94-4c22-81a3-76424ec8810d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.533242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "19a1441d-9621-4e6e-ac38-8ad08206facf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.533513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.533697] env[68233]: DEBUG nova.compute.manager [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 640.542885] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Reconfiguring VM instance instance-00000012 to attach disk [datastore2] 90d88fcb-6141-499c-b049-ddfc9e210d5c/90d88fcb-6141-499c-b049-ddfc9e210d5c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 640.543769] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80da1f84-12a3-42c8-8452-9485df960ef3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.546750] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc3a6594-5678-46c1-bffc-3c678791c4a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.566342] env[68233]: DEBUG nova.compute.manager [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Stopping instance; current vm_state: 
active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 640.567018] env[68233]: DEBUG nova.objects.instance [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'flavor' on Instance uuid 19a1441d-9621-4e6e-ac38-8ad08206facf {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 640.570578] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 640.570578] env[68233]: value = "task-2781881" [ 640.570578] env[68233]: _type = "Task" [ 640.570578] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.584102] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.587850] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781881, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.624507] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Releasing lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.624869] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Instance network_info: |[{"id": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "address": "fa:16:3e:7c:ee:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4930ca-01", "ovs_interfaceid": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 640.625326] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:ee:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f4930ca-0156-459b-b9a9-dcc50b3efb21', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.634616] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Creating folder: Project (f6e239df56cc40a0a4df5756613f31be). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.634738] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64251b78-21f1-4de9-ac69-18879718babb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.650724] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Created folder: Project (f6e239df56cc40a0a4df5756613f31be) in parent group-v559223. [ 640.650855] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Creating folder: Instances. Parent ref: group-v559282. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 640.651860] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12c08be2-a66c-4216-a6d8-27958ed56d1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.664236] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Created folder: Instances in parent group-v559282. [ 640.664580] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.664756] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.665086] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-734e29b7-b7a5-48ab-a5d2-1a91331ed2dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.688497] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.688497] env[68233]: value = "task-2781884" [ 640.688497] env[68233]: _type = "Task" [ 640.688497] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.695523] env[68233]: DEBUG nova.scheduler.client.report [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 640.702976] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781884, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.887730] env[68233]: DEBUG nova.compute.manager [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Received event network-changed-0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 640.888013] env[68233]: DEBUG nova.compute.manager [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Refreshing instance network info cache due to event network-changed-0f4930ca-0156-459b-b9a9-dcc50b3efb21. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 640.888627] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] Acquiring lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.888790] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] Acquired lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.888962] env[68233]: DEBUG nova.network.neutron [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Refreshing network info cache for port 0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 640.963470] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 640.991091] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 640.991502] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 640.991768] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 640.992078] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
640.992330] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 640.992577] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 640.993067] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 640.993214] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 640.993494] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 640.994057] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 640.994131] env[68233]: DEBUG nova.virt.hardware [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 640.995465] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5694943c-cc7b-45fc-8584-d941386dad81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.008172] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de757fe-228a-4d4d-aca4-f9a4a8e2849a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.098628] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781880, 'name': ReconfigVM_Task, 'duration_secs': 0.720662} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.098628] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781881, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.098878] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfigured VM instance instance-00000006 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 641.099805] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7769f095-6e6b-45ba-9434-5a28ee6a3ea7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.125866] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.126267] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eee48bf8-791b-45e9-bb3a-496a9c0b51f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.150854] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 641.150854] env[68233]: value = "task-2781885" [ 641.150854] env[68233]: _type = "Task" [ 641.150854] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.163051] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781885, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.202038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.268s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.204340] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781884, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.204633] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.297s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.205347] env[68233]: DEBUG nova.objects.instance [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lazy-loading 'resources' on Instance uuid 6105602a-b8eb-4128-a492-b60a9468018f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 641.234668] env[68233]: INFO nova.scheduler.client.report [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Deleted allocations for instance 34889575-95ea-451c-aa59-49a5f30d4e4c [ 641.454982] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Successfully updated port: 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.588819] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 641.597036] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26660ef5-f9fa-44da-a1a1-79501e79004c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.601527] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781881, 'name': ReconfigVM_Task, 'duration_secs': 0.624486} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.601846] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Reconfigured VM instance instance-00000012 to attach disk [datastore2] 90d88fcb-6141-499c-b049-ddfc9e210d5c/90d88fcb-6141-499c-b049-ddfc9e210d5c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 641.604879] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfe8f141-0e90-4d32-ad37-3b30e9ad44e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.610543] env[68233]: DEBUG oslo_vmware.api [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 641.610543] env[68233]: value = "task-2781886" [ 641.610543] env[68233]: _type = "Task" [ 641.610543] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.615825] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 641.615825] env[68233]: value = "task-2781887" [ 641.615825] env[68233]: _type = "Task" [ 641.615825] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.625291] env[68233]: DEBUG oslo_vmware.api [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781886, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.630692] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781887, 'name': Rename_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.662065] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781885, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.699177] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781884, 'name': CreateVM_Task, 'duration_secs': 0.559062} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.699287] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.700070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.700227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.700582] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.700841] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7913965-ea9f-4a76-b8db-dec749c5ab31 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.708067] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 641.708067] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e35cb3-09e6-8847-9041-471cf5f30b64" [ 641.708067] env[68233]: _type = "Task" [ 641.708067] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.720070] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e35cb3-09e6-8847-9041-471cf5f30b64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.724262] env[68233]: DEBUG nova.network.neutron [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Updated VIF entry in instance network info cache for port 0f4930ca-0156-459b-b9a9-dcc50b3efb21. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 641.725327] env[68233]: DEBUG nova.network.neutron [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Updating instance_info_cache with network_info: [{"id": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "address": "fa:16:3e:7c:ee:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4930ca-01", "ovs_interfaceid": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.744766] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9c3bcd59-f9bc-422c-8d0f-6b8d6158bc74 tempest-TenantUsagesTestJSON-1610052636 tempest-TenantUsagesTestJSON-1610052636-project-member] Lock "34889575-95ea-451c-aa59-49a5f30d4e4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.993s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.959435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.959435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.959435] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 642.127776] env[68233]: DEBUG oslo_vmware.api [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781886, 'name': PowerOffVM_Task, 'duration_secs': 0.233347} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.129513] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 642.129813] env[68233]: DEBUG nova.compute.manager [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.130679] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d6a029-a58d-417c-b928-275417b07812 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.137018] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781887, 'name': Rename_Task, 'duration_secs': 0.191095} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.139945] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.141287] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-094f34d6-454f-4e6a-8382-2d7bd701156d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.155540] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 642.155540] env[68233]: value = "task-2781888" [ 642.155540] env[68233]: _type = "Task" [ 642.155540] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.168984] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781888, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.169308] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781885, 'name': ReconfigVM_Task, 'duration_secs': 0.68277} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.169598] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f/2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.169867] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 642.221841] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e35cb3-09e6-8847-9041-471cf5f30b64, 'name': SearchDatastore_Task, 'duration_secs': 0.015097} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.222957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.223267] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.223515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.223992] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.223992] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.224375] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e1c8d92-9781-4be8-8d11-a341fe01d436 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.229317] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc3dd542-3f7b-4c91-909b-82187beee1dd req-1dda1e64-75db-4269-9e46-cf9fd13eda1f service nova] Releasing lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.242909] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.242909] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.246187] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-876f0bef-e7f0-4a21-96ca-f9d1ca2d29e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.251720] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 642.251720] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5290dceb-82d4-2bd4-cff9-33b6910d579f" [ 642.251720] env[68233]: _type = "Task" [ 642.251720] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.261251] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5290dceb-82d4-2bd4-cff9-33b6910d579f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.277862] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47362a61-fe34-4a4e-87bf-adc4faddd6ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.285073] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0608b0bb-e375-4e94-aecc-67f8344f271b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.318347] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25726597-078e-4078-a726-4595829543c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.325575] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67423ee-170a-488f-8955-32bdbc29a014 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.339195] env[68233]: DEBUG nova.compute.provider_tree [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.510931] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.659015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3903adee-200d-4eae-b68f-b92991eee2aa tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.123s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.669061] env[68233]: DEBUG oslo_vmware.api [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781888, 'name': PowerOnVM_Task, 'duration_secs': 0.479757} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.669422] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 642.669640] env[68233]: INFO nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Took 9.72 seconds to spawn the instance on the hypervisor. 
[ 642.669820] env[68233]: DEBUG nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 642.670654] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349f3666-5692-40cf-a10d-f212ccd775ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.684811] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f812f40f-2e37-4b64-9906-433b6478a8a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.712301] env[68233]: DEBUG nova.network.neutron [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.714902] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1707bd69-44fc-4f36-8aee-1c594555597b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.740017] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 642.762613] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5290dceb-82d4-2bd4-cff9-33b6910d579f, 'name': SearchDatastore_Task, 'duration_secs': 0.020723} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.763710] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a60fc518-c7b7-42c7-9d34-68d471d33607 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.770825] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 642.770825] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b0d785-c86c-84d7-f8f1-4fd1c7237044" [ 642.770825] env[68233]: _type = "Task" [ 642.770825] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.781129] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b0d785-c86c-84d7-f8f1-4fd1c7237044, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.845886] env[68233]: DEBUG nova.scheduler.client.report [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.948680] env[68233]: DEBUG nova.compute.manager [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-vif-plugged-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 642.948765] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Acquiring lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 642.948946] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.949130] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.949295] env[68233]: DEBUG nova.compute.manager [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] No waiting events found dispatching network-vif-plugged-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 642.950623] env[68233]: WARNING nova.compute.manager [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received unexpected event network-vif-plugged-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 for instance with vm_state building and task_state spawning. [ 642.950623] env[68233]: DEBUG nova.compute.manager [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 642.950623] env[68233]: DEBUG nova.compute.manager [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing instance network info cache due to event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 642.950623] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.198973] env[68233]: INFO nova.compute.manager [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Took 36.29 seconds to build instance. 
[ 643.220080] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.220779] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Instance network_info: |[{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 643.221118] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 643.221718] env[68233]: DEBUG nova.network.neutron [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 643.222942] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:06:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2dd0d9e1-b8b6-464b-a497-b32b7ff64400', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.230583] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Creating folder: 
Project (d53bb0dba91d48ccb92d5fa899086f66). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.231353] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9916242e-b08f-44cd-a797-25dcc1201f60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.248178] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Created folder: Project (d53bb0dba91d48ccb92d5fa899086f66) in parent group-v559223. [ 643.248385] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Creating folder: Instances. Parent ref: group-v559285. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.248638] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-120852ef-9990-4758-bcc0-6973461fa578 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.262075] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Created folder: Instances in parent group-v559285. [ 643.262075] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.262075] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.262075] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7b60f13-7ff6-409e-92e9-c1e3e4740f6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.289236] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b0d785-c86c-84d7-f8f1-4fd1c7237044, 'name': SearchDatastore_Task, 'duration_secs': 0.018067} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.289901] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.292220] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d1577f70-4fb6-4b0b-9d41-8d245c26c90c/d1577f70-4fb6-4b0b-9d41-8d245c26c90c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 643.292220] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.292220] env[68233]: value = "task-2781891" [ 643.292220] env[68233]: _type = "Task" [ 643.292220] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.292220] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36f4b42a-9285-418c-ae66-b607fd56c610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.301615] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 643.301615] env[68233]: value = "task-2781892" [ 643.301615] env[68233]: _type = "Task" [ 643.301615] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.308893] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781892, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.333082] env[68233]: DEBUG nova.network.neutron [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Port 749b7b47-864a-4c70-804b-9e57cc1b14a5 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 643.350236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.353748] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.193s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.354876] env[68233]: INFO nova.compute.claims [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 643.376323] env[68233]: INFO nova.scheduler.client.report [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Deleted allocations for instance 6105602a-b8eb-4128-a492-b60a9468018f [ 643.701295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-20693d71-1223-4d58-a73a-a8fe12619e0b tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.896s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 643.802960] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781891, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.812948] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781892, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.886584] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c60612fb-3a7d-49cc-a777-bb27ca88858a tempest-ServerDiagnosticsV248Test-1962870759 tempest-ServerDiagnosticsV248Test-1962870759-project-member] Lock "6105602a-b8eb-4128-a492-b60a9468018f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.213s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.162244] env[68233]: DEBUG nova.network.neutron [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updated VIF entry in instance network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 644.162593] env[68233]: DEBUG nova.network.neutron [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.204603] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 644.304998] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781891, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.314389] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781892, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542326} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.315036] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d1577f70-4fb6-4b0b-9d41-8d245c26c90c/d1577f70-4fb6-4b0b-9d41-8d245c26c90c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.315511] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.315823] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9088a3d9-7902-491f-a773-5254c13d3bff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.324979] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 644.324979] env[68233]: value = "task-2781893" [ 644.324979] env[68233]: _type = "Task" [ 644.324979] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.338222] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781893, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.358053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.358199] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.358365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.627037] env[68233]: DEBUG nova.compute.manager [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 644.627960] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6a1b3b-c5b3-4e21-af28-55004d788783 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.665425] env[68233]: DEBUG oslo_concurrency.lockutils [req-aa90476c-92b4-426c-a595-557a76fc86da req-37eb08fe-ccc6-40cf-85f4-ef8d1bec9802 service nova] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.732463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 644.808053] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781891, 'name': CreateVM_Task, 'duration_secs': 1.424567} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.809569] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.809569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.809569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.809569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.809984] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7730064f-b90c-43fe-bebc-3a29288de4e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.815875] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 644.815875] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527bdb80-7e72-3c3e-da06-9cf0ba21220d" [ 644.815875] env[68233]: _type = "Task" [ 644.815875] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.825078] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527bdb80-7e72-3c3e-da06-9cf0ba21220d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.837604] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781893, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06844} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.837604] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.838989] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032ad981-6d49-4c84-bbfb-b12613447da5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.864707] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] d1577f70-4fb6-4b0b-9d41-8d245c26c90c/d1577f70-4fb6-4b0b-9d41-8d245c26c90c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.869181] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b527a7bf-0271-4986-b951-78ff191d78a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.891049] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 644.891049] env[68233]: value = "task-2781894" [ 644.891049] env[68233]: _type = "Task" [ 644.891049] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.910414] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781894, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.928762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcd46e8-82c9-4578-a98b-35a7239f04c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.939223] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafebe5f-6949-43a1-9a8e-7cf4ccace4a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.970288] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06569315-764b-4f3a-bdf7-281440d801c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.977933] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf89160f-0396-4724-bcaa-ed4c26c8f1c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.995130] env[68233]: DEBUG nova.compute.provider_tree [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.151037] env[68233]: INFO nova.compute.manager [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] instance snapshotting [ 645.151037] env[68233]: WARNING nova.compute.manager [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 645.152291] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878ab660-aac8-475d-94d9-f1375d186226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.176360] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfd160c-907d-4c05-83c3-410938e5752b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.329374] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527bdb80-7e72-3c3e-da06-9cf0ba21220d, 'name': SearchDatastore_Task, 'duration_secs': 0.035889} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.329761] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.330025] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 645.330371] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.330586] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.330834] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 645.331410] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ee73b29e-f102-4d06-ad1c-022e7ac2f6e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.340789] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 645.340948] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 645.342236] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba5d9744-782e-42d1-be18-cfa5e9489521 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.348107] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 645.348107] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218ce05-e40a-75b7-a6df-3a3bed8becb6" [ 645.348107] env[68233]: _type = "Task" [ 645.348107] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.358962] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218ce05-e40a-75b7-a6df-3a3bed8becb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.400695] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781894, 'name': ReconfigVM_Task, 'duration_secs': 0.298638} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.401020] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Reconfigured VM instance instance-00000013 to attach disk [datastore2] d1577f70-4fb6-4b0b-9d41-8d245c26c90c/d1577f70-4fb6-4b0b-9d41-8d245c26c90c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.401653] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5010f18d-4797-4b82-acc3-42778e867151 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.408018] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 645.408018] env[68233]: value = "task-2781895" [ 645.408018] env[68233]: _type = "Task" [ 645.408018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.417016] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781895, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.498699] env[68233]: DEBUG nova.scheduler.client.report [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 645.687335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.687335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.687335] env[68233]: DEBUG nova.network.neutron [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.688025] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 645.688441] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9b17f6b2-7eb4-42d6-beb1-3683a623dd81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.695990] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 645.695990] env[68233]: value = "task-2781896" [ 645.695990] env[68233]: _type = "Task" [ 645.695990] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.705957] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781896, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.859965] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218ce05-e40a-75b7-a6df-3a3bed8becb6, 'name': SearchDatastore_Task, 'duration_secs': 0.011516} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.861256] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d29fdf-2dc4-4daa-8b39-20c234b93733 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.868782] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 645.868782] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d29d4e-00e6-76b8-f133-c57cf5f88d95" [ 645.868782] env[68233]: _type = "Task" [ 645.868782] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.878429] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d29d4e-00e6-76b8-f133-c57cf5f88d95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.918640] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781895, 'name': Rename_Task, 'duration_secs': 0.152894} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.918973] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 645.919268] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8582b3ce-1cb8-48c4-bc1a-780b4e0b38f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.926435] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 645.926435] env[68233]: value = "task-2781897" [ 645.926435] env[68233]: _type = "Task" [ 645.926435] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.938723] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781897, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.006973] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.007571] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 646.013181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.614s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.013181] env[68233]: DEBUG nova.objects.instance [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 646.211738] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781896, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.271141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.271439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.271666] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.271886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.273729] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.276942] env[68233]: INFO nova.compute.manager [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Terminating instance [ 646.378862] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d29d4e-00e6-76b8-f133-c57cf5f88d95, 'name': SearchDatastore_Task, 'duration_secs': 0.013783} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.379214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.379406] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ba4ad2f8-fad1-45be-b2b1-68c3a58f3750/ba4ad2f8-fad1-45be-b2b1-68c3a58f3750.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 646.379670] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5a58cd8-4b78-4d64-a6e3-b4881e1fbd94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.387181] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 646.387181] env[68233]: value = "task-2781898" [ 646.387181] env[68233]: _type = "Task" [ 646.387181] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.396877] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781898, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.435699] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781897, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.516515] env[68233]: DEBUG nova.compute.utils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 646.521259] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 646.521259] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 646.565866] env[68233]: DEBUG nova.policy [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3bd33b2e2143f8be165a10e4665c7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '963898fb1cae4e6e9438ace9dd437f9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 646.675558] env[68233]: DEBUG nova.network.neutron [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.716063] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781896, 'name': CreateSnapshot_Task, 'duration_secs': 0.889857} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.716063] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 646.716985] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c6e147-33ad-4690-ae1e-4fc7c76c0d57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.780835] env[68233]: DEBUG nova.compute.manager [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 646.781213] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.784373] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa40d8d-e699-431e-a0e8-10ef698a37db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.795969] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 646.799968] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-482ca093-8f87-484a-881e-ca93f34662f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.807232] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 646.807232] env[68233]: value = "task-2781899" [ 646.807232] env[68233]: _type = "Task" [ 646.807232] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.817171] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.904467] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781898, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.910426] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Successfully created port: 757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.943362] env[68233]: DEBUG oslo_vmware.api [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781897, 'name': PowerOnVM_Task, 'duration_secs': 0.567862} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.943732] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 646.944018] env[68233]: INFO nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Took 8.82 seconds to spawn the instance on the hypervisor. [ 646.944262] env[68233]: DEBUG nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 646.945201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216da2cf-9ac9-4996-9e66-2ddc078e475d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.025335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5cf53c2-b059-41ac-b49a-1589e2fa5e44 tempest-ServersAdmin275Test-543470896 tempest-ServersAdmin275Test-543470896-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.026925] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 647.030040] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.330s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.030040] env[68233]: DEBUG nova.objects.instance [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 647.182246] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 647.243692] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 647.244785] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-28a1714c-9360-410c-9f80-54f34a851700 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.253746] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 647.253746] env[68233]: value = "task-2781900" [ 647.253746] env[68233]: _type = "Task" [ 647.253746] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.262397] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781900, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.318551] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781899, 'name': PowerOffVM_Task, 'duration_secs': 0.311586} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.319010] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.319297] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.320796] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d554db23-0dd9-4438-ae63-c2bd5472116b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.405391] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619223} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.405391] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ba4ad2f8-fad1-45be-b2b1-68c3a58f3750/ba4ad2f8-fad1-45be-b2b1-68c3a58f3750.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 647.405391] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 647.405391] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e5c53ca-ed1f-41cc-9313-2cf5581ccc1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.407168] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 647.407638] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 647.408090] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleting the datastore file [datastore2] 90d88fcb-6141-499c-b049-ddfc9e210d5c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 647.408483] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abffc089-ad1a-47aa-9da1-dd3ca8eef6be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.415110] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 647.415110] env[68233]: value = "task-2781902" [ 647.415110] env[68233]: _type = "Task" [ 647.415110] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.416871] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 647.416871] env[68233]: value = "task-2781903" [ 647.416871] env[68233]: _type = "Task" [ 647.416871] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.429696] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781902, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.437140] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781903, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.468077] env[68233]: INFO nova.compute.manager [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Took 38.85 seconds to build instance. 
[ 647.710180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e495c3d-fdcf-4bc9-9975-ed5908e5160e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.735478] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4fd779-3c37-411f-9a71-10a604e9d108 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.741612] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.741890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.747098] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 647.762944] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781900, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.932980] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069655} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.937412] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.938406] env[68233]: DEBUG oslo_vmware.api [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781903, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392329} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.939572] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d587ad7-db18-4fd6-b7bb-f90051863d9c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.942939] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 647.943192] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 647.943535] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 647.944072] env[68233]: INFO nova.compute.manager [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 647.944857] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 647.944857] env[68233]: DEBUG nova.compute.manager [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 647.944857] env[68233]: DEBUG nova.network.neutron [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 647.971831] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] ba4ad2f8-fad1-45be-b2b1-68c3a58f3750/ba4ad2f8-fad1-45be-b2b1-68c3a58f3750.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.971831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cc9c7ce0-2d94-4500-9b28-c07aebe57b66 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.839s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.971831] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d60a92-ef52-4358-9c53-072ec0fc4f88 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.994472] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 647.994472] env[68233]: value = "task-2781904" [ 647.994472] env[68233]: _type = "Task" [ 647.994472] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.005906] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781904, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.045492] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4c74f699-739f-4e65-b169-4d82c72eb81d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 648.046671] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.413s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 648.046901] env[68233]: DEBUG nova.objects.instance [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lazy-loading 'resources' on Instance uuid e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 648.048764] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 648.086850] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 648.087165] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.087332] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 648.087536] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 
tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.087697] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 648.087905] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 648.089311] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 648.090085] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 648.090975] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 648.090975] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 648.090975] env[68233]: DEBUG nova.virt.hardware [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 648.091801] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17cc24f-aeeb-48ae-9bd4-a99039940b7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.101199] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2af215-dfe8-48c0-b698-226095e6844b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.255684] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powering on the VM 
{{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 648.257629] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eb893d45-dbe3-46a6-a8e0-1ac7a2c191f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.272671] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781900, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.274716] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 648.274716] env[68233]: value = "task-2781905" [ 648.274716] env[68233]: _type = "Task" [ 648.274716] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.285180] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781905, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.489602] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 648.496208] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Successfully updated port: 757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.509459] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781904, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.768358] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781900, 'name': CloneVM_Task, 'duration_secs': 1.475002} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.768621] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Created linked-clone VM from snapshot [ 648.769383] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e86be06-c293-45de-8cb1-944f21446b56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.782731] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Uploading image c5fcb8c8-99d6-4008-bbcd-dd80dc0016f1 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 648.789351] env[68233]: DEBUG oslo_vmware.api [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2781905, 'name': PowerOnVM_Task, 'duration_secs': 0.397447} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.791835] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.793031] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ece8033-8445-4010-a686-92b70122ff87 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance '2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 648.811317] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 648.811317] env[68233]: value = "vm-559289" [ 648.811317] env[68233]: _type = "VirtualMachine" [ 648.811317] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 648.811608] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ea267f6b-59fd-4c8f-89ec-faaadc87d66c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.822686] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease: (returnval){ [ 648.822686] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52575886-1ef0-7543-b1a0-9f508f9b5d0e" [ 648.822686] env[68233]: _type = "HttpNfcLease" [ 648.822686] env[68233]: } obtained for exporting VM: (result){ [ 648.822686] env[68233]: value = "vm-559289" [ 648.822686] env[68233]: _type = "VirtualMachine" [ 648.822686] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 648.822686] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the lease: (returnval){ [ 648.822686] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52575886-1ef0-7543-b1a0-9f508f9b5d0e" [ 648.822686] env[68233]: _type = "HttpNfcLease" [ 648.822686] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 648.829562] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 648.829562] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52575886-1ef0-7543-b1a0-9f508f9b5d0e" [ 648.829562] env[68233]: _type = "HttpNfcLease" [ 648.829562] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 648.990651] env[68233]: DEBUG nova.network.neutron [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.006809] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.006956] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.007193] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.017072] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781904, 'name': ReconfigVM_Task, 'duration_secs': 0.634706} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.017428] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Reconfigured VM instance instance-00000014 to attach disk [datastore2] ba4ad2f8-fad1-45be-b2b1-68c3a58f3750/ba4ad2f8-fad1-45be-b2b1-68c3a58f3750.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 649.018117] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bea67d45-2bd7-4ae8-b278-0157d527cd45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.029189] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 649.029189] env[68233]: value = "task-2781907" [ 649.029189] env[68233]: _type = "Task" [ 649.029189] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.030284] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.042842] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781907, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.078977] env[68233]: DEBUG nova.compute.manager [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Received event network-vif-plugged-757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 649.080975] env[68233]: DEBUG oslo_concurrency.lockutils [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] Acquiring lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.080975] env[68233]: DEBUG oslo_concurrency.lockutils [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.080975] env[68233]: DEBUG oslo_concurrency.lockutils [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.080975] env[68233]: DEBUG nova.compute.manager [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] No waiting events found dispatching network-vif-plugged-757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 649.080975] env[68233]: WARNING nova.compute.manager [req-23a80f25-0dc2-4375-a94f-f13ef585179a req-09c3d048-46fd-4847-87d6-bb046c4d882a service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Received unexpected event network-vif-plugged-757e5be0-4fd2-40d3-b5fd-a6667126afc1 for instance with vm_state building and task_state spawning. [ 649.170824] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002026cf-3ab8-407f-a19c-dd1866fb06bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.177445] env[68233]: DEBUG nova.compute.manager [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Received event network-changed {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 649.177445] env[68233]: DEBUG nova.compute.manager [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Refreshing instance network info cache due to event network-changed. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 649.177445] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] Acquiring lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.177445] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] Acquired lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.177445] env[68233]: DEBUG nova.network.neutron [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 649.187340] env[68233]: DEBUG nova.compute.manager [req-67cf3848-47d5-42de-b9e1-e6a9066c6d57 req-e81d2d54-c803-4515-a8a0-137f4953598d service nova] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Received event network-vif-deleted-dfc4fcf9-323d-437d-a092-4763f3b7d0e5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 649.190802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a8ed9f-50bb-4878-b4ae-cf80bc9517af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.222474] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b35f383-59e3-4450-a860-950aabf694c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.231159] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4202817a-b459-4b09-a082-fc78a0632d3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.245293] env[68233]: DEBUG nova.compute.provider_tree [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.330408] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 649.330408] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52575886-1ef0-7543-b1a0-9f508f9b5d0e" [ 649.330408] env[68233]: _type = "HttpNfcLease" [ 649.330408] env[68233]: } is ready. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 649.330805] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 649.330805] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52575886-1ef0-7543-b1a0-9f508f9b5d0e" [ 649.330805] env[68233]: _type = "HttpNfcLease" [ 649.330805] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 649.331506] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f8564a-c83a-44c3-9bb2-9e4e673a63e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.340374] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 649.340438] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 649.492232] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-428e1d00-3f2f-44d3-84e8-49c099a6cce0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.494374] env[68233]: INFO nova.compute.manager [-] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Took 1.55 seconds to deallocate network for instance. [ 649.547816] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781907, 'name': Rename_Task, 'duration_secs': 0.167789} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.547816] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.547816] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aef6b9c4-750e-42fc-9e7c-55c20cd6ada9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.557439] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 649.557439] env[68233]: value = "task-2781908" [ 649.557439] env[68233]: _type = "Task" [ 649.557439] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.564262] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781908, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.573666] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.748370] env[68233]: DEBUG nova.scheduler.client.report [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.753280] env[68233]: DEBUG nova.network.neutron [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Updating instance_info_cache with network_info: [{"id": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "address": "fa:16:3e:95:fe:ef", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap757e5be0-4f", "ovs_interfaceid": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.002284] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.072223] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781908, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.124597] env[68233]: DEBUG nova.network.neutron [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Updating instance_info_cache with network_info: [{"id": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "address": "fa:16:3e:7c:ee:4e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f4930ca-01", "ovs_interfaceid": "0f4930ca-0156-459b-b9a9-dcc50b3efb21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.220618] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.220933] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.221210] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.221842] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.221842] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.224169] env[68233]: INFO nova.compute.manager [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Terminating instance [ 650.257029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.260549] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.260891] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Instance network_info: |[{"id": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "address": "fa:16:3e:95:fe:ef", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap757e5be0-4f", "ovs_interfaceid": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 650.261509] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.666s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.263488] env[68233]: INFO nova.compute.claims [None 
req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.267501] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:fe:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0dd3c126-9d86-4f9a-b81c-e9627c7a5401', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '757e5be0-4fd2-40d3-b5fd-a6667126afc1', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.276912] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.277279] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.278097] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55c32906-d8d8-4c66-b2a2-52a7bc2d44a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.298023] env[68233]: INFO nova.scheduler.client.report [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Deleted allocations for instance e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67 [ 650.300345] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.300345] env[68233]: value = "task-2781909" [ 650.300345] env[68233]: _type = "Task" [ 650.300345] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.317943] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781909, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.332595] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.332897] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.578847] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781908, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.632018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36f0379d-af07-47de-b227-77fa5ca24462 tempest-ServerExternalEventsTest-1185282425 tempest-ServerExternalEventsTest-1185282425-project] Releasing lock "refresh_cache-d1577f70-4fb6-4b0b-9d41-8d245c26c90c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.731023] env[68233]: DEBUG nova.compute.manager [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 650.731023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.731023] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7250d871-ad7e-48bc-a1df-31ad31c7e91a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.738837] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 650.738837] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-768203c6-0032-48da-ae00-7d0d1f31d5e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.750262] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 650.750262] env[68233]: value = "task-2781910" [ 650.750262] env[68233]: _type = "Task" [ 650.750262] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.758299] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781910, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.814367] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781909, 'name': CreateVM_Task, 'duration_secs': 0.512813} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.814975] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d642c59f-4996-4e20-9d9a-37941afda09b tempest-ServersAdmin275Test-1718965882 tempest-ServersAdmin275Test-1718965882-project-member] Lock "e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.227s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.815914] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.816833] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.817015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.817322] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.818050] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c0b3662-e4d6-4c08-9b16-08cc376b014e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.824317] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 650.824317] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c8ce-7c85-ca63-a214-4536b5adeee6" [ 650.824317] env[68233]: _type = "Task" [ 650.824317] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.835113] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c8ce-7c85-ca63-a214-4536b5adeee6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.840568] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.840849] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.841050] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.841246] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.841435] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.841623] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 650.841775] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 650.841902] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 651.071524] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781908, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.259393] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781910, 'name': PowerOffVM_Task, 'duration_secs': 0.223889} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.259829] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.260096] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.260419] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-379a5b19-7387-4264-9e33-944a93946167 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.337900] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c8ce-7c85-ca63-a214-4536b5adeee6, 'name': SearchDatastore_Task, 'duration_secs': 0.013423} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.340299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.341529] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.343030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.343030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.343030] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.343030] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.343349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.343349] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Deleting the datastore file [datastore2] d1577f70-4fb6-4b0b-9d41-8d245c26c90c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.343539] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1052c82-818b-4aa8-9588-02fbdcc792d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.346820] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.347207] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-803a321b-5264-4249-a5bb-f92d454da3e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.356606] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for the task: (returnval){ [ 651.356606] env[68233]: value = "task-2781912" [ 651.356606] env[68233]: _type = "Task" [ 651.356606] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.357992] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.358094] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.362615] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0723bc6-a28a-4923-8e11-d3d3e36fc846 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.371783] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.372203] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 651.372203] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f01ba1-a4f8-12fa-4869-787e31be6cdd" [ 651.372203] env[68233]: _type = "Task" [ 651.372203] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.377141] env[68233]: DEBUG nova.compute.manager [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Received event network-changed-757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 651.377350] env[68233]: DEBUG nova.compute.manager [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Refreshing instance network info cache due to event network-changed-757e5be0-4fd2-40d3-b5fd-a6667126afc1. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 651.377588] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] Acquiring lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.377754] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] Acquired lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.377919] env[68233]: DEBUG nova.network.neutron [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Refreshing network info cache for port 757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.388851] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f01ba1-a4f8-12fa-4869-787e31be6cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.011565} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.390256] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f1c8982-e672-47f3-9242-273bf017d5ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.399362] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 651.399362] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52526d53-a2ab-19a2-129e-e71c2754e910" [ 651.399362] env[68233]: _type = "Task" [ 651.399362] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.409855] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52526d53-a2ab-19a2-129e-e71c2754e910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.420809] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.422036] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.422036] env[68233]: DEBUG nova.compute.manager [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Going to confirm migration 1 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 651.569632] env[68233]: DEBUG oslo_vmware.api [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2781908, 'name': PowerOnVM_Task, 'duration_secs': 1.776086} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.569933] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 651.570147] env[68233]: INFO nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Took 10.61 seconds to spawn the instance on the hypervisor. [ 651.570329] env[68233]: DEBUG nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 651.571182] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb8261d-b0ab-4107-9a25-1462b1f02392 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.842993] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e91113c-d19b-4568-bf27-811af69e88bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.855697] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e41cac-5357-4848-9145-e2ce04800ddc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.867974] env[68233]: DEBUG oslo_vmware.api [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Task: {'id': task-2781912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23484} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.891680] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 651.891913] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 651.892107] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 651.892295] env[68233]: INFO nova.compute.manager [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Took 1.16 seconds to destroy the instance on the hypervisor. [ 651.892542] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.895032] env[68233]: DEBUG nova.compute.manager [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 651.895139] env[68233]: DEBUG nova.network.neutron [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 651.897060] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ae4904-3155-48d1-9d82-f4a161776a61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.906722] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959e8b7d-4aec-461f-ba77-8fec1da4ce03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.916944] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52526d53-a2ab-19a2-129e-e71c2754e910, 'name': SearchDatastore_Task, 'duration_secs': 0.012651} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.919426] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.919426] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/2812bf7c-5117-4fd9-9330-0cc94277bf5d.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 651.919426] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8ced841-b15a-477b-8900-b9a01d5753e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.939084] env[68233]: DEBUG nova.compute.provider_tree [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.948580] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 651.948580] env[68233]: value = "task-2781913" [ 651.948580] env[68233]: _type = "Task" [ 651.948580] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.958981] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781913, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.069573] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.070090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.070211] env[68233]: DEBUG nova.network.neutron [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 652.070602] env[68233]: DEBUG nova.objects.instance [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lazy-loading 'info_cache' on Instance uuid 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.099727] env[68233]: INFO nova.compute.manager [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Took 40.41 seconds to build instance. [ 652.255164] env[68233]: DEBUG nova.network.neutron [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Updated VIF entry in instance network info cache for port 757e5be0-4fd2-40d3-b5fd-a6667126afc1. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 652.255474] env[68233]: DEBUG nova.network.neutron [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Updating instance_info_cache with network_info: [{"id": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "address": "fa:16:3e:95:fe:ef", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap757e5be0-4f", "ovs_interfaceid": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.445277] env[68233]: DEBUG nova.scheduler.client.report [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.463693] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781913, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.602702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8b5e8f0-2b3f-476f-bd41-cec6782dda50 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.356s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.758622] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb23d933-056d-42b2-9a60-b790e8265615 req-c34db1d8-7b6c-4499-a41d-ad0296915cac service nova] Releasing lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.770640] env[68233]: DEBUG nova.network.neutron [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 652.956701] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.957270] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.963543] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.377s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.965127] env[68233]: INFO nova.compute.claims [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 652.969113] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57849} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.969113] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/2812bf7c-5117-4fd9-9330-0cc94277bf5d.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 652.969113] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 652.969113] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40e96a94-63d4-42f5-8b7b-4980648486a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.977119] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 652.977119] env[68233]: value = "task-2781914" [ 652.977119] env[68233]: _type = "Task" [ 652.977119] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.990483] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781914, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.105181] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 653.272471] env[68233]: INFO nova.compute.manager [-] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Took 1.38 seconds to deallocate network for instance. 
[ 653.415409] env[68233]: DEBUG nova.network.neutron [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [{"id": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "address": "fa:16:3e:89:49:e6", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.50", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749b7b47-86", "ovs_interfaceid": "749b7b47-864a-4c70-804b-9e57cc1b14a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 653.470450] env[68233]: DEBUG nova.compute.utils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.471982] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.472118] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.491944] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105531} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.492232] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.493324] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e853add-a3f4-4486-8115-7b7bb401757e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.515885] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/2812bf7c-5117-4fd9-9330-0cc94277bf5d.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.518637] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-533303ef-2340-4ee1-acfc-22edfbadbf6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.539539] env[68233]: DEBUG nova.policy [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43c48242abf540fe99d95f3d2df541ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baab6817c97645bcae2e08502b7f96db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.549439] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 653.549439] env[68233]: value = "task-2781915" [ 653.549439] env[68233]: _type = "Task" [ 653.549439] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.558549] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781915, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.638405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.778927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.787743] env[68233]: DEBUG nova.compute.manager [req-8c4dda5b-a35e-4ef7-896b-3060db2a56e6 req-01d5f998-7cef-4e1f-942a-66b0a2f22e18 service nova] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Received event network-vif-deleted-0f4930ca-0156-459b-b9a9-dcc50b3efb21 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 653.918668] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.919212] env[68233]: DEBUG nova.objects.instance [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lazy-loading 'migration_context' on Instance uuid 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 653.980419] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 654.065177] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781915, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.280565] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Successfully created port: 5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.422699] env[68233]: DEBUG nova.objects.base [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Object Instance<2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 654.425389] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ed661b-375b-4cd6-9524-13831625e073 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.454590] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2465f70c-6f84-40a9-8e51-9d193930957e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.460595] env[68233]: DEBUG oslo_vmware.api [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 654.460595] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529dda73-1c83-5864-b45a-e0b5dc60aa32" [ 654.460595] env[68233]: _type = "Task" [ 654.460595] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.472125] env[68233]: DEBUG oslo_vmware.api [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529dda73-1c83-5864-b45a-e0b5dc60aa32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.562192] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781915, 'name': ReconfigVM_Task, 'duration_secs': 0.559752} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.563458] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/2812bf7c-5117-4fd9-9330-0cc94277bf5d.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 654.565177] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a408bdda-cea8-45f1-a508-23c3c498cadb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.570993] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e769e4-56d3-4c97-9e88-2d6c83f4f99d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.582931] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aecbd0-d0fe-4d95-905d-02d0ab3e1209 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.587811] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 654.587811] env[68233]: value = "task-2781916" [ 654.587811] env[68233]: _type = "Task" [ 654.587811] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.622984] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88999be1-98c1-48f9-9a65-1eca45280945 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.631658] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781916, 'name': Rename_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.639436] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fc718c-44f1-4000-8e4b-0aa032db9cf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.654934] env[68233]: DEBUG nova.compute.provider_tree [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 654.840807] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.841047] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.976583] env[68233]: DEBUG oslo_vmware.api [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529dda73-1c83-5864-b45a-e0b5dc60aa32, 'name': SearchDatastore_Task, 'duration_secs': 0.008924} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.976583] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.995662] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 655.031550] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 655.031883] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 655.031963] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 655.032546] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 655.032546] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 655.032546] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 655.032756] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 655.032965] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 655.033196] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 655.033381] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 655.033582] env[68233]: DEBUG nova.virt.hardware [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 655.035219] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e39b9e-c550-4abd-8ab9-6bf12fbb9801 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.043704] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c1f191-0b43-47c3-a5be-1a02cd2cc796 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.100052] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781916, 'name': Rename_Task, 'duration_secs': 0.209115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.100052] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.100052] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b6840b9-b748-49a5-bd14-c4861c2d59c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.106042] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 655.106042] env[68233]: value = "task-2781917" [ 655.106042] env[68233]: _type = "Task" [ 655.106042] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.115015] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781917, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.161295] env[68233]: DEBUG nova.scheduler.client.report [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 655.622179] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781917, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.669917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.670338] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 655.673156] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.445s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.673389] env[68233]: DEBUG nova.objects.instance [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lazy-loading 'resources' on Instance uuid 68a4e635-381d-4dc2-879c-5581cd5e189a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 656.047631] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Successfully updated port: 5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 656.120344] env[68233]: DEBUG oslo_vmware.api [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781917, 'name': PowerOnVM_Task, 'duration_secs': 0.668837} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.121419] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.121419] env[68233]: INFO nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Took 8.07 seconds to spawn the instance on the hypervisor. [ 656.121419] env[68233]: DEBUG nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.122180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792fa687-1270-421c-954d-b524a7b493e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.177058] env[68233]: DEBUG nova.compute.utils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 656.182793] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 656.183144] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 656.249284] env[68233]: DEBUG nova.policy [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '93a6beddad844c838bf1fb0707dcbc64', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '91c6b7158b6c4082876f94b32495a113', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 656.553286] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.553286] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.553286] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 656.571190] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Successfully created port: cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.643427] env[68233]: DEBUG nova.compute.manager [req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Received event network-vif-plugged-5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 656.643648] env[68233]: DEBUG oslo_concurrency.lockutils [req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] Acquiring lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.643894] env[68233]: DEBUG oslo_concurrency.lockutils 
[req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.644082] env[68233]: DEBUG oslo_concurrency.lockutils [req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.644251] env[68233]: DEBUG nova.compute.manager [req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] No waiting events found dispatching network-vif-plugged-5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.644420] env[68233]: WARNING nova.compute.manager [req-e4490e29-8690-40c0-87ba-b7d0b4c5c34a req-e385825f-5b79-45ac-961b-c19e241b5baf service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Received unexpected event network-vif-plugged-5bbc186d-7708-4c96-a2a7-454a8aae1e5c for instance with vm_state building and task_state spawning. [ 656.653504] env[68233]: INFO nova.compute.manager [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Took 42.52 seconds to build instance. [ 656.685214] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 656.726237] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8e4402-7fdc-4ebe-bdea-8bb1fd067b85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.738563] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81632b28-3b5a-488f-86e2-ed2a2b178388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.772947] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d129fb19-d022-46bd-983f-ca0fb9a4c634 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.782699] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352a3f8a-aea8-4635-9480-c351142b56a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.801485] env[68233]: DEBUG nova.compute.provider_tree [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.815061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.815256] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.997866] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 656.997866] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49265c5e-4fc9-48f4-966a-f458ffa4e81d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.004885] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 657.005097] env[68233]: ERROR oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk due to incomplete transfer. [ 657.005338] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-55a36772-7e87-46d0-a082-a8df7ff8233e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.013060] env[68233]: DEBUG oslo_vmware.rw_handles [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52780665-3758-5bcf-2e09-3d80a20fe6ff/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 657.013322] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Uploaded image c5fcb8c8-99d6-4008-bbcd-dd80dc0016f1 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 657.016400] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 657.017162] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-761e9220-205e-4fbe-991b-458da94ab4bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.025691] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 657.025691] env[68233]: value = "task-2781918" [ 657.025691] env[68233]: _type = "Task" [ 657.025691] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.038504] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781918, 'name': Destroy_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.113687] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 657.154663] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80be6edc-b58b-40ee-84a7-b5de502af646 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.449s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.304957] env[68233]: DEBUG nova.scheduler.client.report [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.504119] env[68233]: DEBUG nova.network.neutron [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Updating instance_info_cache with network_info: [{"id": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "address": "fa:16:3e:45:29:78", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bbc186d-77", "ovs_interfaceid": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 657.539534] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781918, 'name': Destroy_Task, 'duration_secs': 0.387266} completed 
successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.539822] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Destroyed the VM [ 657.541565] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 657.542189] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fb56e1f5-4c44-4f4a-bce6-0d833e44b5d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.549303] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 657.549303] env[68233]: value = "task-2781919" [ 657.549303] env[68233]: _type = "Task" [ 657.549303] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.566727] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781919, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.663175] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 657.696059] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 657.729421] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 657.730741] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.730741] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 657.730741] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.730741] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 657.730741] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 657.731181] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 657.731181] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 657.731181] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 657.731181] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 657.732282] env[68233]: DEBUG nova.virt.hardware [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 657.732658] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93c6f39-357a-4843-864c-cba96ba8a837 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.741970] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-395fed15-630b-4fd6-8ebf-502dc2dafca7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.811772] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.814334] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.166s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.816044] env[68233]: INFO nova.compute.claims [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.844338] env[68233]: INFO nova.scheduler.client.report [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Deleted allocations for instance 68a4e635-381d-4dc2-879c-5581cd5e189a [ 658.006906] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.007185] env[68233]: DEBUG nova.compute.manager [None 
req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance network_info: |[{"id": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "address": "fa:16:3e:45:29:78", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bbc186d-77", "ovs_interfaceid": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 658.007636] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:29:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bbc186d-7708-4c96-a2a7-454a8aae1e5c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.015258] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating folder: Project (baab6817c97645bcae2e08502b7f96db). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.015606] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea68026e-29ad-4658-87a3-5a5661f8eec7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.034891] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created folder: Project (baab6817c97645bcae2e08502b7f96db) in parent group-v559223. [ 658.035305] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating folder: Instances. Parent ref: group-v559291. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 658.035379] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9881e86-17eb-4dc9-9b69-9d71abfa7577 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.044470] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created folder: Instances in parent group-v559291. [ 658.044720] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 658.044908] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 658.045126] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a68c46a9-0e48-49d8-8df0-1687e76d448c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.069086] env[68233]: DEBUG oslo_vmware.api [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781919, 'name': RemoveSnapshot_Task, 'duration_secs': 0.349013} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.070469] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 658.070939] env[68233]: INFO nova.compute.manager [None req-94e98e8a-ff4b-4d13-947f-52ce6a376c92 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Took 12.92 seconds to snapshot the instance on the hypervisor. [ 658.073818] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 658.073818] env[68233]: value = "task-2781922" [ 658.073818] env[68233]: _type = "Task" [ 658.073818] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.081920] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781922, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.194321] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.206578] env[68233]: INFO nova.compute.manager [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Rescuing [ 658.206859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.207030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.207204] env[68233]: DEBUG nova.network.neutron [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 658.263255] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Successfully updated port: cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 658.357011] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34c80eb4-8916-4425-8df9-657acd459c9d tempest-ServersListShow298Test-1354555361 tempest-ServersListShow298Test-1354555361-project-member] Lock "68a4e635-381d-4dc2-879c-5581cd5e189a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.481s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.586837] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781922, 'name': CreateVM_Task, 'duration_secs': 0.348617} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.587020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 658.587738] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.587879] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.588249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 658.588507] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5e9ffd9-5f98-425c-984a-5272c371d1be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.593851] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 658.593851] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52019c82-1ced-842f-7368-168883600a35" [ 658.593851] env[68233]: _type = "Task" [ 658.593851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 658.603506] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52019c82-1ced-842f-7368-168883600a35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.733260] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Received event network-changed-5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 658.734336] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Refreshing instance network info cache due to event network-changed-5bbc186d-7708-4c96-a2a7-454a8aae1e5c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 658.734336] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Acquiring lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.734336] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Acquired lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.734452] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Refreshing network info cache for port 5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 658.762896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.763063] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 658.763218] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.018095] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "19a1441d-9621-4e6e-ac38-8ad08206facf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.018255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.018440] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.018636] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.018821] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.025219] env[68233]: INFO nova.compute.manager [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Terminating instance [ 659.051226] env[68233]: DEBUG nova.network.neutron [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Updating instance_info_cache with network_info: [{"id": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "address": "fa:16:3e:95:fe:ef", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap757e5be0-4f", "ovs_interfaceid": "757e5be0-4fd2-40d3-b5fd-a6667126afc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.111852] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52019c82-1ced-842f-7368-168883600a35, 'name': SearchDatastore_Task, 'duration_secs': 0.010101} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.114214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.114425] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 659.114664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.114853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.115054] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.115635] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5341306e-c44e-4697-9d70-53aebf7517d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.124283] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.124472] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 659.125294] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9fa3a7d-127b-4a56-bb7c-0eea8ac1f946 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.130816] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 659.130816] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52272798-37bf-7251-469c-da0b1ea92752" [ 659.130816] env[68233]: _type = "Task" [ 659.130816] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.142642] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52272798-37bf-7251-469c-da0b1ea92752, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.321987] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 659.378217] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2758904-047c-461d-973b-b29ac27ca45b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.388666] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99b0b136-2f6c-4daf-9f17-2a5ad68e156a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.423637] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af7cb439-6fbb-422a-875d-927c79600c45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.433036] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb7b69e-44bb-4584-9437-ddebe5e45e1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.445982] env[68233]: DEBUG nova.compute.provider_tree [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.529793] env[68233]: DEBUG nova.compute.manager [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 659.530092] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 659.530998] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6c2ac6-fdce-4340-89ac-1a31111392e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.539162] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 659.539402] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b5235a9-6bd6-432d-b682-d9d66b169b15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.553731] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-2812bf7c-5117-4fd9-9330-0cc94277bf5d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 659.603624] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 659.603840] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 659.604031] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore2] 19a1441d-9621-4e6e-ac38-8ad08206facf {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 659.604348] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8b38ee1-4ab5-4b97-8c96-cf98b5c86ec3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.612118] env[68233]: DEBUG oslo_vmware.api [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 659.612118] env[68233]: value = "task-2781924" [ 659.612118] env[68233]: _type = "Task" [ 659.612118] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.620877] env[68233]: DEBUG oslo_vmware.api [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781924, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.639918] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52272798-37bf-7251-469c-da0b1ea92752, 'name': SearchDatastore_Task, 'duration_secs': 0.010513} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.640686] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bbb5a59-a91c-425a-898b-4e60e662623b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.645884] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 659.645884] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3dec7-c782-a60e-7c62-d66aa42f4587" [ 659.645884] env[68233]: _type = "Task" [ 659.645884] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.653550] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3dec7-c782-a60e-7c62-d66aa42f4587, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.679093] env[68233]: DEBUG nova.network.neutron [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.697939] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Updated VIF entry in instance network info cache for port 5bbc186d-7708-4c96-a2a7-454a8aae1e5c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 659.698387] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Updating instance_info_cache with network_info: [{"id": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "address": "fa:16:3e:45:29:78", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5bbc186d-77", "ovs_interfaceid": "5bbc186d-7708-4c96-a2a7-454a8aae1e5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.804186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "2a88648c-f00d-4d7b-905d-e70c327e248a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.804487] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.949670] env[68233]: DEBUG nova.scheduler.client.report [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 660.124602] env[68233]: DEBUG oslo_vmware.api [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2781924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19472} completed 
successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.124872] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.125063] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.125242] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.125935] env[68233]: INFO nova.compute.manager [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Took 0.60 seconds to destroy the instance on the hypervisor. [ 660.125935] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.125935] env[68233]: DEBUG nova.compute.manager [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.125935] env[68233]: DEBUG nova.network.neutron [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.156608] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3dec7-c782-a60e-7c62-d66aa42f4587, 'name': SearchDatastore_Task, 'duration_secs': 0.010267} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.156926] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.157251] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 660.157553] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-753c4b19-a0a5-41fa-b9df-cff753cb4ec6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.165029] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 660.165029] env[68233]: value = "task-2781925" [ 660.165029] env[68233]: _type = "Task" [ 660.165029] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.173107] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781925, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.181814] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.182261] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Instance network_info: |[{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 660.182691] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:44:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf641b62-960d-40ec-9fdd-3b4845dcf864', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.190474] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Creating folder: Project (91c6b7158b6c4082876f94b32495a113). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.190767] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4744eaa-5ba6-471c-8439-f99ebdcff4be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.201458] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Releasing lock "refresh_cache-c6a358b7-0e6a-43bb-a171-5e6175f947bd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.201683] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-vif-plugged-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 660.201935] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Acquiring lock "75f58a50-7891-42df-8820-c997300a3159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.202143] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Lock "75f58a50-7891-42df-8820-c997300a3159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.202329] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Lock "75f58a50-7891-42df-8820-c997300a3159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.202742] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] No waiting events found dispatching network-vif-plugged-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 660.203065] env[68233]: WARNING nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received unexpected event network-vif-plugged-cf641b62-960d-40ec-9fdd-3b4845dcf864 for instance with vm_state building and task_state spawning. 
[ 660.203348] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 660.203513] env[68233]: DEBUG nova.compute.manager [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing instance network info cache due to event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 660.203724] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.203933] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.204175] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 660.207975] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Created folder: Project (91c6b7158b6c4082876f94b32495a113) in parent group-v559223. [ 660.207975] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Creating folder: Instances. Parent ref: group-v559294. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.207975] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eda1bdcf-c11e-4729-8fc6-64d590830bde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.221339] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Created folder: Instances in parent group-v559294. [ 660.221339] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.221415] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 660.221681] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71770032-45c3-4e84-ab96-de7a677d935a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.246973] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.246973] env[68233]: value = "task-2781928" [ 660.246973] env[68233]: _type = "Task" [ 660.246973] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.258105] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781928, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.459438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.461590] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.466068] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.401s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.469779] env[68233]: INFO nova.compute.claims [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 660.678205] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781925, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.759433] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781928, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.980623] env[68233]: DEBUG nova.compute.utils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.987729] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 660.988520] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 661.064395] env[68233]: DEBUG nova.policy [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43c48242abf540fe99d95f3d2df541ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baab6817c97645bcae2e08502b7f96db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 661.106691] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.107017] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64ba2b49-f458-4939-9814-da88f8101945 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.120985] env[68233]: DEBUG nova.compute.manager [req-0a5bf258-cc47-4713-8764-1c8548480028 req-256388af-2715-4688-aaae-841550b6ce79 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Received event network-vif-deleted-2e7be8f9-f275-4c54-ab25-eaa64558351c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 661.121250] env[68233]: INFO nova.compute.manager [req-0a5bf258-cc47-4713-8764-1c8548480028 req-256388af-2715-4688-aaae-841550b6ce79 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Neutron deleted interface 2e7be8f9-f275-4c54-ab25-eaa64558351c; detaching it from the instance and deleting it from the info cache [ 661.121460] env[68233]: DEBUG nova.network.neutron [req-0a5bf258-cc47-4713-8764-1c8548480028 req-256388af-2715-4688-aaae-841550b6ce79 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.123067] 
env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 661.123067] env[68233]: value = "task-2781929" [ 661.123067] env[68233]: _type = "Task" [ 661.123067] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.137203] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781929, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.176270] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686716} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.176582] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 661.176799] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 661.177075] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3685e48-e010-4914-8164-c7c691075fad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.183777] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 661.183777] env[68233]: value = "task-2781930" [ 661.183777] env[68233]: _type = "Task" [ 661.183777] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.197070] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781930, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.258031] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781928, 'name': CreateVM_Task, 'duration_secs': 0.583149} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.258238] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.259874] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.259874] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.259874] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.259874] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79290bde-fd89-4e92-8c5b-ed7ab949e020 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.262468] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updated VIF entry in instance network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 661.262656] env[68233]: DEBUG nova.network.neutron [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.267247] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 661.267247] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5282aabd-cbb3-6182-7635-2eaae93fc655" [ 661.267247] env[68233]: _type = "Task" [ 661.267247] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.277620] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5282aabd-cbb3-6182-7635-2eaae93fc655, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.324844] env[68233]: DEBUG nova.network.neutron [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.489990] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.628578] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf8bb26c-729c-4aea-aac9-a6e03a5e8977 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.656503] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781929, 'name': PowerOffVM_Task, 'duration_secs': 0.202343} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.656845] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 661.663018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8fd03a1-f59f-4ccf-9082-8bc78380de80 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.671448] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967a6b27-da94-493d-942c-020b0309999c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.715914] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9e1889-626c-453e-830b-e1cb03ba6da5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.715914] env[68233]: DEBUG nova.compute.manager [req-0a5bf258-cc47-4713-8764-1c8548480028 req-256388af-2715-4688-aaae-841550b6ce79 service nova] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Detach interface failed, port_id=2e7be8f9-f275-4c54-ab25-eaa64558351c, reason: Instance 19a1441d-9621-4e6e-ac38-8ad08206facf could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 661.726083] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072913} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.726350] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 661.727215] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91658d1d-d819-4dd0-9ab3-3f3c655fed11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.749689] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 661.754244] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-583bb560-2e9d-489d-b436-497f7503e699 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.770616] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dffe167-f87b-4554-9332-2ab940fca362 req-4fdee2ef-ae9f-43ca-bb4a-edc9495599da service nova] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.778285] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 661.779036] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb5203ca-3080-4ea5-b56a-7a888d85c6a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.785539] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5282aabd-cbb3-6182-7635-2eaae93fc655, 'name': SearchDatastore_Task, 'duration_secs': 0.010582} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.785832] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 661.785832] env[68233]: value = "task-2781931" [ 661.785832] env[68233]: _type = "Task" [ 661.785832] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.789090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.789254] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.789486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.789655] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.789850] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.791179] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e9643f1-6883-4fc0-9972-4a3d31a4c150 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.796428] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 661.796428] env[68233]: value = "task-2781932" [ 661.796428] env[68233]: _type = "Task" [ 661.796428] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.804240] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781931, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.813076] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 661.814885] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.814885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.814885] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.814885] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.814885] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e5fb46-93ce-46be-a049-7d30360975fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.820321] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 661.820321] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5266ef67-34c4-3e87-76eb-4f80975ad24e" [ 661.820321] env[68233]: _type = "Task" [ 661.820321] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.832525] env[68233]: INFO nova.compute.manager [-] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Took 1.71 seconds to deallocate network for instance. [ 661.832840] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5266ef67-34c4-3e87-76eb-4f80975ad24e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.980782] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Successfully created port: 4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.179484] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bdde53-6f63-4661-a9ae-993c172729be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.187443] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d116bb17-6f2b-4082-bdd6-95671d3021f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.219979] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f74ac6-4740-4bc5-a4a9-a2e062d91be6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.228327] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa5beb5-d348-4e44-87af-e656d68758f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.241929] env[68233]: DEBUG nova.compute.provider_tree [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.301126] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781931, 'name': ReconfigVM_Task, 'duration_secs': 0.296824} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.301126] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 662.301126] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f06a4ee-3266-487c-9ceb-77df049b5a34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.307216] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 662.307216] env[68233]: value = "task-2781933" [ 662.307216] env[68233]: _type = "Task" [ 662.307216] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.316245] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781933, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.329604] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5266ef67-34c4-3e87-76eb-4f80975ad24e, 'name': SearchDatastore_Task, 'duration_secs': 0.009117} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.330566] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba372c9-274c-4222-861b-7859fec20db3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.336197] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 662.336197] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523b87e8-35a4-dd1a-f003-b7b5c9ee1bb9" [ 662.336197] env[68233]: _type = "Task" [ 662.336197] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.341197] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.344400] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523b87e8-35a4-dd1a-f003-b7b5c9ee1bb9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.511919] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.544367] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.544367] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.544535] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.544579] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.544784] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.545370] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.545437] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.545625] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.546043] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.546242] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.546416] env[68233]: DEBUG nova.virt.hardware [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.547367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0492069-a6a8-42c6-95d3-9acae2595fd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.556678] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1089ffa5-e21b-4f5d-9004-35b390483aae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.750034] env[68233]: DEBUG nova.scheduler.client.report [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.821972] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781933, 'name': Rename_Task, 'duration_secs': 0.208403} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.822922] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 662.822922] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-edb73e9d-553e-4bf9-9c3b-27fceec24d4a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.830480] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 662.830480] env[68233]: value = "task-2781934" [ 662.830480] env[68233]: _type = "Task" [ 662.830480] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.842897] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.848522] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523b87e8-35a4-dd1a-f003-b7b5c9ee1bb9, 'name': SearchDatastore_Task, 'duration_secs': 0.02794} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.849184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.849358] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 75f58a50-7891-42df-8820-c997300a3159/75f58a50-7891-42df-8820-c997300a3159.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 662.849669] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.850140] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 662.850140] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5dbd016c-d8d4-4029-bc0b-069142e84d98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.854438] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-678ee4b7-b564-468a-8c8c-e65b4b008883 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.859724] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 662.859724] env[68233]: value = "task-2781935" [ 662.859724] env[68233]: _type = "Task" [ 662.859724] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.864370] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 662.864370] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 662.865268] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2993628b-bba7-40ac-97bc-3d0b261d974e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.871255] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781935, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.873548] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 662.873548] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a634a9-c1d1-e711-e33a-51cb4865e42e" [ 662.873548] env[68233]: _type = "Task" [ 662.873548] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.881803] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a634a9-c1d1-e711-e33a-51cb4865e42e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.258017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.791s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.259865] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 663.261667] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.829s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.263365] env[68233]: INFO nova.compute.claims [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 663.345368] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781934, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.370898] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781935, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.391179] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a634a9-c1d1-e711-e33a-51cb4865e42e, 'name': SearchDatastore_Task, 'duration_secs': 0.010302} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.392405] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d543f8-9a3d-4e96-aead-c1d81ff78072 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.399898] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 663.399898] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a010d0-c06b-e1a6-30ab-e3c2d334b991" [ 663.399898] env[68233]: _type = "Task" [ 663.399898] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.410719] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a010d0-c06b-e1a6-30ab-e3c2d334b991, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.768206] env[68233]: DEBUG nova.compute.utils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.776096] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.776283] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.849267] env[68233]: DEBUG oslo_vmware.api [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781934, 'name': PowerOnVM_Task, 'duration_secs': 0.767924} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.849988] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 663.850292] env[68233]: INFO nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Took 8.85 seconds to spawn the instance on the hypervisor. [ 663.850709] env[68233]: DEBUG nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 663.852637] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7ba2a4-0450-4d36-bada-fded9759a086 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.870789] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Successfully updated port: 4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.877699] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781935, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539564} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.878191] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 75f58a50-7891-42df-8820-c997300a3159/75f58a50-7891-42df-8820-c997300a3159.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 663.879473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 663.879473] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ee00ff64-6fae-40c7-8d54-00fb681f40d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.882830] env[68233]: DEBUG nova.policy [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84b53e3749ad4eeda9caf91f835539bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f92048d34e7c4863bbfbaec1273c122f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.889522] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 663.889522] env[68233]: value = "task-2781936" [ 663.889522] env[68233]: _type = "Task" [ 663.889522] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.900874] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781936, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.912564] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.912564] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.917667] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a010d0-c06b-e1a6-30ab-e3c2d334b991, 'name': SearchDatastore_Task, 'duration_secs': 0.010927} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.918158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.918464] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. {{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 663.918883] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e7ec2a2c-b07e-4c8d-bc66-013c63c090f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.925624] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 663.925624] env[68233]: value = "task-2781937" [ 663.925624] env[68233]: _type = "Task" [ 663.925624] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.934037] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.279491] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 664.286787] env[68233]: DEBUG nova.compute.manager [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Received event network-vif-plugged-4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 664.290018] env[68233]: DEBUG oslo_concurrency.lockutils [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] Acquiring lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.290018] env[68233]: DEBUG oslo_concurrency.lockutils [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 664.290018] env[68233]: DEBUG oslo_concurrency.lockutils [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.290018] env[68233]: DEBUG nova.compute.manager [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] No waiting events found dispatching network-vif-plugged-4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 664.290018] env[68233]: WARNING nova.compute.manager [req-da210196-6b8c-4100-b0bf-b1b42bcb8bea req-6cc68e7c-5862-48ac-97da-d1190ed4bbd5 service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Received unexpected event network-vif-plugged-4a662388-bd69-4cf2-bc5f-b97de14b1ee3 for instance with vm_state building and task_state spawning. 
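[annotation] The repeated "Waiting for the task: (returnval){ ... } to complete" and "Task: {...} progress is N%" pairs above (PowerOnVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task) are emitted by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py). The snippet below is a minimal stand-alone sketch of that same loop using the public oslo.vmware API, not Nova's own code; the vCenter host, credentials and the 'vm-123' managed-object id are placeholders, not values taken from this log.

    # Minimal sketch: drive the same poll loop that produces the
    # "Waiting for the task ... to complete" / "progress is N%" lines above.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # host, username, password, api_retry_count, task_poll_interval (seconds)
    session = vmware_api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret', 10, 0.5)

    # Build a managed-object reference for a VM; 'vm-123' is a placeholder moid.
    vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

    # Issues the SOAP call that shows up in the log as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=..."
    task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # Polls TaskInfo until the task succeeds or raises, producing the
    # "Task: {...} progress is N%" and "completed successfully" DEBUG lines.
    session.wait_for_task(task_ref)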
[ 664.373220] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.373437] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.373627] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.374897] env[68233]: INFO nova.compute.manager [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Took 41.79 seconds to build instance. [ 664.379883] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Successfully created port: 69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.402370] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102169} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.405302] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.405981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e2ec60-8344-48ed-b0ef-0025f9abf0f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.428361] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 75f58a50-7891-42df-8820-c997300a3159/75f58a50-7891-42df-8820-c997300a3159.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.431462] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-850f5f50-2029-4335-8977-606d5f049b4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.457459] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781937, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.461095] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 664.461095] env[68233]: value = "task-2781938" [ 664.461095] env[68233]: _type = "Task" [ 664.461095] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.470679] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781938, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.878943] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8dedaaa3-1adf-4e30-86d6-feebd1ba6541 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.561s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.910950] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 665.606734] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 665.609980] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 665.612456] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.923642} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.622077] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. [ 665.622077] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7891b425-3fe7-4945-ac8d-772c9e553ec8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.624554] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "876d428d-d5c9-422a-aba2-2d6c61b092db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.624774] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.625403] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e792c5c-8664-4fa6-a9b6-c1c0df2cf023 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.648388] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa9f8b7-2c59-4bed-9ffe-31f60547bc46 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.651670] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 
tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781938, 'name': ReconfigVM_Task, 'duration_secs': 0.466125} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.661461] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.661862] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 75f58a50-7891-42df-8820-c997300a3159/75f58a50-7891-42df-8820-c997300a3159.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.662529] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b4919a9-baa4-4f92-9339-1b16e10a1ed8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.678603] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18c24ba1-08da-4d0a-99d1-7cde3d39cc1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.706157] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 665.706393] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.708016] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Image limits 0:0:0 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 665.708016] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.708016] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 665.708016] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 665.708016] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 665.708366] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 665.708366] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 665.708366] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 665.708366] env[68233]: DEBUG nova.virt.hardware [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 665.709812] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef759f4-17e3-431d-b23a-dd2b18260779 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.713126] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aadefcb-1627-4305-bc1d-73ad8ea43604 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.717438] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 
tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 665.717438] env[68233]: value = "task-2781939" [ 665.717438] env[68233]: _type = "Task" [ 665.717438] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.719036] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 665.719036] env[68233]: value = "task-2781940" [ 665.719036] env[68233]: _type = "Task" [ 665.719036] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.730315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1df5db2b-ee27-493d-8c0f-f315683873c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.735466] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5a7aa5-287c-4412-8278-b7b8a7054189 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.742627] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781939, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.763773] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781940, 'name': Rename_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.764234] env[68233]: DEBUG nova.compute.provider_tree [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.810135] env[68233]: DEBUG nova.network.neutron [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Updating instance_info_cache with network_info: [{"id": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "address": "fa:16:3e:23:65:93", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a662388-bd", "ovs_interfaceid": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.142796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.232425] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781939, 'name': ReconfigVM_Task, 'duration_secs': 0.462223} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.238021] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.238021] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781940, 'name': Rename_Task, 'duration_secs': 0.296846} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.238021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bc08df-f69c-4900-b627-aaada5b41316 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.239388] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.239626] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84381d62-1845-4c7b-a6e7-db474d02d8ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.266103] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1edd80d2-75e4-40e3-b041-c873bf3124b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.276730] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 666.276730] env[68233]: value = "task-2781941" [ 666.276730] env[68233]: _type = "Task" [ 666.276730] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.278754] env[68233]: DEBUG nova.scheduler.client.report [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 666.282910] env[68233]: DEBUG nova.compute.manager [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Received event network-vif-plugged-69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 666.283213] env[68233]: DEBUG oslo_concurrency.lockutils [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] Acquiring lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.283326] env[68233]: DEBUG oslo_concurrency.lockutils [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] Lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.283491] env[68233]: DEBUG oslo_concurrency.lockutils [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] Lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.283659] env[68233]: DEBUG nova.compute.manager [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] No waiting events found dispatching network-vif-plugged-69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 666.283822] env[68233]: WARNING nova.compute.manager [req-c2ee8e0c-6f9c-401a-aa41-376a09eb6b21 req-50038164-806c-4e50-8183-369f2dbcb606 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Received unexpected event network-vif-plugged-69a128c9-1103-4a47-9adf-f0e87598c6c4 for instance with vm_state building and task_state spawning. [ 666.289451] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 666.289451] env[68233]: value = "task-2781942" [ 666.289451] env[68233]: _type = "Task" [ 666.289451] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.300125] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781941, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.302868] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781942, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.312155] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.312525] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Instance network_info: |[{"id": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "address": "fa:16:3e:23:65:93", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a662388-bd", "ovs_interfaceid": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 666.312941] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:65:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a662388-bd69-4cf2-bc5f-b97de14b1ee3', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 666.320697] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f41b815d-110f-41ec-8c36-27aaa633873b 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.321558] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 666.321770] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0a3c1c3-e98e-405d-8dd6-ed8d0e3aea28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.345962] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 666.345962] env[68233]: value = "task-2781943" [ 666.345962] env[68233]: _type = "Task" [ 666.345962] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.354627] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781943, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.369120] env[68233]: DEBUG nova.compute.manager [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Received event network-changed-4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 666.369351] env[68233]: DEBUG nova.compute.manager [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Refreshing instance network info cache due to event network-changed-4a662388-bd69-4cf2-bc5f-b97de14b1ee3. 
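
Editor's note: the "Creating VM on the ESX host" / "Invoking Folder.CreateVM_Task" / "Waiting for the task" / "progress is N%" records above all come from one invoke-then-poll pattern in oslo.vmware. The sketch below is illustrative only, not Nova's actual vm_util code; it assumes an existing oslo_vmware `VMwareAPISession` named `session` plus morefs and a config spec obtained elsewhere.

```python
# Minimal sketch of the invoke-then-poll pattern behind the log records above.
from oslo_vmware import api  # real module; used here only for the type hint

def create_vm(session: "api.VMwareAPISession", folder_ref, config_spec,
              res_pool_ref, host_ref=None):
    # CreateVM_Task is asynchronous on the vCenter side: invoke_api returns a
    # Task moref immediately (the "Invoking Folder.CreateVM_Task ..." line).
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                                  config=config_spec, pool=res_pool_ref,
                                  host=host_ref)
    # wait_for_task polls the task (producing the "progress is N%" lines) and
    # raises if the task ends in error; on success it returns the TaskInfo.
    task_info = session.wait_for_task(task_ref)
    return task_info.result  # moref of the newly created VirtualMachine
```
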
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 666.369670] env[68233]: DEBUG oslo_concurrency.lockutils [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] Acquiring lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.369893] env[68233]: DEBUG oslo_concurrency.lockutils [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] Acquired lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.370096] env[68233]: DEBUG nova.network.neutron [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Refreshing network info cache for port 4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 666.387241] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Successfully updated port: 69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.787510] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.526s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.789055] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.199s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.789297] env[68233]: DEBUG nova.objects.instance [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lazy-loading 'resources' on Instance uuid 38c86c2b-9b2b-482e-b26d-066208467202 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 666.798663] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781941, 'name': PowerOnVM_Task} progress is 91%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.804785] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781942, 'name': ReconfigVM_Task, 'duration_secs': 0.185068} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.805083] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.805381] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-174da351-863b-4d69-b592-b21ca49c5522 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.814224] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 666.814224] env[68233]: value = "task-2781944" [ 666.814224] env[68233]: _type = "Task" [ 666.814224] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.823148] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781944, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.855927] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781943, 'name': CreateVM_Task, 'duration_secs': 0.340375} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.856111] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 666.856792] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.856957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.857320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 666.857572] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b46398f-0017-4d42-a644-403d41ea93be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.862293] 
env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 666.862293] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529f4188-b50a-0f52-2fbc-f82ed1ba0d78" [ 666.862293] env[68233]: _type = "Task" [ 666.862293] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.877031] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529f4188-b50a-0f52-2fbc-f82ed1ba0d78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.890984] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.891166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquired lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.891381] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 667.147150] env[68233]: DEBUG nova.network.neutron [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Updated VIF entry in instance network info cache for port 4a662388-bd69-4cf2-bc5f-b97de14b1ee3. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 667.147512] env[68233]: DEBUG nova.network.neutron [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Updating instance_info_cache with network_info: [{"id": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "address": "fa:16:3e:23:65:93", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a662388-bd", "ovs_interfaceid": "4a662388-bd69-4cf2-bc5f-b97de14b1ee3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.297074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "b5ae9993-0c1e-4c69-9b1c-cc8b218ea257" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.297328] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "b5ae9993-0c1e-4c69-9b1c-cc8b218ea257" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.298709] env[68233]: DEBUG oslo_vmware.api [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2781941, 'name': PowerOnVM_Task, 'duration_secs': 0.673661} completed successfully. 
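
Editor's note: the 'Acquiring lock "b5ae9993-..." by "..._do_validation"' / 'acquired ... waited 0.000s' lines above (and the matching 'released ... held 0.503s' line later) are emitted by oslo.concurrency's named fair semaphores. A hedged sketch of the decorator form follows; only the lock name mirrors the log, the function body and arguments are hypothetical.

```python
# Sketch of the oslo.concurrency usage that produces the acquire/release log
# lines above; not the actual nova.compute.manager code.
from oslo_concurrency import lockutils

@lockutils.synchronized('b5ae9993-0c1e-4c69-9b1c-cc8b218ea257')
def _do_validation(context, instance, group_hint):
    # Everything here runs with the named semaphore held, so two builds
    # validating the same server-group policy serialize instead of racing.
    # Entry and exit log how long the caller waited and held the lock.
    pass
```
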
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.299314] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.299513] env[68233]: INFO nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Took 9.60 seconds to spawn the instance on the hypervisor. [ 667.299732] env[68233]: DEBUG nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.300514] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0d85aa-0592-47ec-bfad-f3ff89033cb8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.324340] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781944, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.371993] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529f4188-b50a-0f52-2fbc-f82ed1ba0d78, 'name': SearchDatastore_Task, 'duration_secs': 0.017175} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.378021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.378021] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 667.378021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.378021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 667.378280] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 667.378280] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41d2c043-16e7-4f46-ba20-cf3274625040 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.384204] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 667.384382] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore2] devstack-image-cache_base created. 
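
Editor's note: the lock lines around "[datastore2] devstack-image-cache_base/..." and the "Creating directory ... / Invoking FileManager.MakeDirectory / Created directory" records above show the image-cache path being guarded and created. The sketch below is an assumption-laden simplification of that flow, not nova's `_fetch_image_if_missing`; the datastore path and datacenter moref are placeholders.

```python
# Illustrative sketch: guard the per-image cache path with the context-manager
# form of the same lock utility, then create the cache directory with the
# synchronous FileManager.MakeDirectory call (no task to poll, matching the log).
from oslo_concurrency import lockutils

def ensure_image_cache_dir(session, dc_ref,
                           cache_path='[datastore2] devstack-image-cache_base'):
    with lockutils.lock(cache_path):
        file_manager = session.vim.service_content.fileManager
        # MakeDirectory returns nothing; with createParentDirectories set the
        # caller only needs to tolerate a FileAlreadyExists fault on reruns.
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name=cache_path, datacenter=dc_ref,
                           createParentDirectories=True)
```
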
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 667.385154] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afadd070-5f6c-4a1c-a096-51fea443a416 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.394147] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 667.394147] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260bfeb-19f9-298d-9f2f-8a40239f2035" [ 667.394147] env[68233]: _type = "Task" [ 667.394147] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.404626] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260bfeb-19f9-298d-9f2f-8a40239f2035, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.452274] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.655067] env[68233]: DEBUG oslo_concurrency.lockutils [req-19a30737-accb-4f2a-bf91-0c7d0ba3355e req-7af02f83-3fa1-4988-be84-9081f06062fd service nova] Releasing lock "refresh_cache-6ceb7d2d-143a-464a-aca5-6b6838630bb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.712035] env[68233]: DEBUG nova.network.neutron [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Updating instance_info_cache with network_info: [{"id": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "address": "fa:16:3e:59:f0:c7", "network": {"id": "c7a128c0-259c-4e80-9d28-8fff739fad3d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1251487550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f92048d34e7c4863bbfbaec1273c122f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69a128c9-11", "ovs_interfaceid": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.800017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "b5ae9993-0c1e-4c69-9b1c-cc8b218ea257" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.800562] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.823945] env[68233]: INFO nova.compute.manager [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Took 43.25 seconds to build instance. [ 667.828366] env[68233]: DEBUG oslo_vmware.api [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781944, 'name': PowerOnVM_Task, 'duration_secs': 0.770713} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.828366] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 667.834371] env[68233]: DEBUG nova.compute.manager [None req-d138a216-00cd-48e5-ae7e-d92fdffe07bc tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 667.835176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afdd02ba-3237-4c42-8551-6cbb321b2595 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.868137] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74365a97-0752-440e-8660-3a670feab5bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.875176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8798d6-cf6c-41f9-80b5-8c516d8f9610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.909582] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd3b1d-8a38-42f9-b3b6-ccbb35fab20e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.919248] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260bfeb-19f9-298d-9f2f-8a40239f2035, 'name': SearchDatastore_Task, 'duration_secs': 0.01189} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.921907] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdce57bb-5adf-4f26-9c58-1a84a1c4de62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.927670] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9096ecc-343f-4ccd-8cf1-6e8b045d0c52 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.932515] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 667.932515] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528dee16-0665-3d25-7190-9ac38b0ccc4d" [ 667.932515] env[68233]: _type = "Task" [ 667.932515] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.941260] env[68233]: DEBUG nova.compute.provider_tree [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.950666] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528dee16-0665-3d25-7190-9ac38b0ccc4d, 'name': SearchDatastore_Task, 'duration_secs': 0.008791} completed successfully. 
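
Editor's note: the repeated "Invoking HostDatastoreBrowser.SearchDatastore_Task" records above are the datastore browser being asked whether the cached image vmdk already exists before the driver decides to download or just copy it. The sketch below simplifies the spec and property handling and is not the exact nova code.

```python
# Rough sketch of a cached-image existence check via the datastore browser.
from oslo_vmware import vim_util

def cached_image_exists(session, ds_ref, folder_path, file_name):
    # The datastore's browser moref is read as a property of the datastore.
    browser = session.invoke_api(vim_util, 'get_object_property',
                                 session.vim, ds_ref, 'browser')
    spec = session.vim.client.factory.create('ns0:HostDatastoreBrowserSearchSpec')
    spec.matchPattern = [file_name]
    # SearchDatastore_Task is asynchronous, hence the "progress is 0%" lines.
    task_ref = session.invoke_api(session.vim, 'SearchDatastore_Task', browser,
                                  datastorePath=folder_path, searchSpec=spec)
    task_info = session.wait_for_task(task_ref)
    results = task_info.result  # HostDatastoreBrowserSearchResults
    return bool(getattr(results, 'file', []))
```
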
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.951557] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 667.951827] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6ceb7d2d-143a-464a-aca5-6b6838630bb8/6ceb7d2d-143a-464a-aca5-6b6838630bb8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 667.952132] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2bef69f-5b02-4126-bbd3-9e92adf4d556 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.958547] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 667.958547] env[68233]: value = "task-2781945" [ 667.958547] env[68233]: _type = "Task" [ 667.958547] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.966272] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781945, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.214733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Releasing lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.215132] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Instance network_info: |[{"id": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "address": "fa:16:3e:59:f0:c7", "network": {"id": "c7a128c0-259c-4e80-9d28-8fff739fad3d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1251487550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f92048d34e7c4863bbfbaec1273c122f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69a128c9-11", "ovs_interfaceid": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 668.215542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:f0:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7041d198-66a3-40de-bf7d-cfc036e6ed69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '69a128c9-1103-4a47-9adf-f0e87598c6c4', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.224944] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Creating folder: Project (f92048d34e7c4863bbfbaec1273c122f). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.225355] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e76ab17a-fd10-4386-add1-15c8bce0abed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.237542] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Created folder: Project (f92048d34e7c4863bbfbaec1273c122f) in parent group-v559223. [ 668.237844] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Creating folder: Instances. Parent ref: group-v559298. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.238096] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf16182f-d0f3-4591-8070-d41540ebad28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.249307] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Created folder: Instances in parent group-v559298. [ 668.249675] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.249902] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.250101] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e82144d7-d8e5-4261-8e58-815984e0e514 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.271095] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.271095] env[68233]: value = "task-2781948" [ 668.271095] env[68233]: _type = "Task" [ 668.271095] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.279418] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781948, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.309667] env[68233]: DEBUG nova.compute.utils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.311759] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.312031] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.325155] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b77a1ed8-bb34-4eaf-9ba2-370fc2241358 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.921s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.447295] env[68233]: DEBUG nova.scheduler.client.report [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 668.458139] env[68233]: DEBUG nova.policy [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e71ddfeba872493f9dc584c1a0aac1e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '514e26678c904a209c56a7d55b093df9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.475307] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439258} completed successfully. 
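
Editor's note: the "Policy check for network:attach_external_network failed with credentials {...}" line above is an oslo.policy authorization run with the requester's token; a plain 'member'/'reader' credential does not satisfy an admin-style rule, so the port is requested without the external-network attribute. The rule string and default below are assumptions for illustration, not nova's registered policy.

```python
# Hedged illustration of the failed policy check logged above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(policy.RuleDefault(
    'network:attach_external_network', 'role:admin'))  # assumed default

creds = {'roles': ['member', 'reader'],
         'project_id': '514e26678c904a209c56a7d55b093df9',
         'is_admin': False}
# Returns False for a plain member token, mirroring the DEBUG line above.
print(enforcer.enforce('network:attach_external_network', {}, creds))
```
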
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.476576] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6ceb7d2d-143a-464a-aca5-6b6838630bb8/6ceb7d2d-143a-464a-aca5-6b6838630bb8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 668.477060] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 668.477985] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b61a6a6e-779c-4193-b193-f119f3db3a4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.487431] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 668.487431] env[68233]: value = "task-2781949" [ 668.487431] env[68233]: _type = "Task" [ 668.487431] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.499927] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781949, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.523162] env[68233]: DEBUG nova.compute.manager [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Received event network-changed-69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 668.523394] env[68233]: DEBUG nova.compute.manager [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Refreshing instance network info cache due to event network-changed-69a128c9-1103-4a47-9adf-f0e87598c6c4. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 668.524055] env[68233]: DEBUG oslo_concurrency.lockutils [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] Acquiring lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.524327] env[68233]: DEBUG oslo_concurrency.lockutils [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] Acquired lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.524577] env[68233]: DEBUG nova.network.neutron [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Refreshing network info cache for port 69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 668.782695] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781948, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.815269] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.827981] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.953301] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.163s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.955365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.758s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.955725] env[68233]: DEBUG nova.objects.instance [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lazy-loading 'resources' on Instance uuid d19421ad-88d5-4479-a6e4-c6d59e863b31 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 668.996776] env[68233]: INFO nova.scheduler.client.report [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted allocations for instance 38c86c2b-9b2b-482e-b26d-066208467202 [ 669.008312] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781949, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064997} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.008312] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 669.008312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07948899-f3bb-43d3-800b-06d2d844225c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.031782] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] 6ceb7d2d-143a-464a-aca5-6b6838630bb8/6ceb7d2d-143a-464a-aca5-6b6838630bb8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 669.034307] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdf84c4a-c7d9-4ab9-99b4-3999a66af1df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.055665] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 669.055665] env[68233]: value = "task-2781950" [ 669.055665] env[68233]: _type = "Task" [ 669.055665] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.067149] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781950, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.190435] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Successfully created port: 993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.275141] env[68233]: DEBUG nova.network.neutron [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Updated VIF entry in instance network info cache for port 69a128c9-1103-4a47-9adf-f0e87598c6c4. 
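
Editor's note: the "Reconfiguring VM instance instance-00000018 to attach disk ..." record above is a VirtualMachine.ReconfigVM_Task carrying a device-change spec built from the suds client factory on the vim service. The sketch below is very rough: key/controller values are placeholders, a flat backing is shown for simplicity (nova may pick a different backing class for sparse disks), and it is not what nova actually computes.

```python
# Rough sketch of attaching an existing vmdk via ReconfigVM_Task.
def attach_vmdk(session, vm_ref, vmdk_path, controller_key):
    cf = session.vim.client.factory
    disk = cf.create('ns0:VirtualDisk')
    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path          # e.g. '[datastore2] <uuid>/<uuid>.vmdk'
    backing.diskMode = 'persistent'
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = 0
    disk.key = -100                       # negative key: assigned on creation
    disk.capacityInKB = 0                 # ignored when attaching an existing file
    dev_change = cf.create('ns0:VirtualDeviceConfigSpec')
    dev_change.operation = 'add'
    dev_change.device = disk
    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [dev_change]
    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
    session.wait_for_task(task_ref)       # logged as ReconfigVM_Task progress
```
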
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 669.275141] env[68233]: DEBUG nova.network.neutron [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Updating instance_info_cache with network_info: [{"id": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "address": "fa:16:3e:59:f0:c7", "network": {"id": "c7a128c0-259c-4e80-9d28-8fff739fad3d", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1251487550-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f92048d34e7c4863bbfbaec1273c122f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7041d198-66a3-40de-bf7d-cfc036e6ed69", "external-id": "nsx-vlan-transportzone-278", "segmentation_id": 278, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap69a128c9-11", "ovs_interfaceid": "69a128c9-1103-4a47-9adf-f0e87598c6c4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.285513] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781948, 'name': CreateVM_Task, 'duration_secs': 0.839819} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.287142] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.287984] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.288308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.288739] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.289369] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dea0faa-32ad-452b-808e-c37690e61cb2 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.294486] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 669.294486] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f38f0b-9819-fd5c-c02c-e546d016f7c1" [ 669.294486] env[68233]: _type = "Task" [ 669.294486] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.304526] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f38f0b-9819-fd5c-c02c-e546d016f7c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.363092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.506687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-203eb18c-6d2b-49f3-941f-ea2ca7055429 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "38c86c2b-9b2b-482e-b26d-066208467202" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.441s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.569942] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781950, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.780141] env[68233]: DEBUG oslo_concurrency.lockutils [req-dbd2167a-abde-4742-8dff-533cb76d6f5f req-29d77a70-b6a3-4334-9507-2b25a3a833c0 service nova] Releasing lock "refresh_cache-769956c6-7824-41db-9779-fc1b5f53dd94" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.810888] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f38f0b-9819-fd5c-c02c-e546d016f7c1, 'name': SearchDatastore_Task, 'duration_secs': 0.040121} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.811251] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.811482] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.811713] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.812472] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.812686] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.812990] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31786b95-c348-4d83-a7d4-0edb0188f7e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.821485] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.821666] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.828614] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6b0ebc6-8b4b-4e98-8c22-43f6d8022097 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.831838] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.841934] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 669.841934] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7bb98-ba18-db17-a245-372fe1e1b003" [ 669.841934] env[68233]: _type = "Task" [ 669.841934] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.855230] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7bb98-ba18-db17-a245-372fe1e1b003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.857747] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 669.857976] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.858158] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 669.858351] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 
tempest-ServerGroupTestJSON-1004789544-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.858506] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 669.858655] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 669.858866] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 669.859033] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 669.859204] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 669.859660] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 669.859983] env[68233]: DEBUG nova.virt.hardware [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 669.860870] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17daa67d-0040-4f5b-89b7-6b58fab7fd6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.871744] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d062950f-b070-4100-a9ba-9f0603cf39d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.067152] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781950, 'name': ReconfigVM_Task, 'duration_secs': 0.569647} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.068517] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Reconfigured VM instance instance-00000018 to attach disk [datastore2] 6ceb7d2d-143a-464a-aca5-6b6838630bb8/6ceb7d2d-143a-464a-aca5-6b6838630bb8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 670.069726] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-967fc8d6-0724-4eca-8bdf-52976a09480f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.072422] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dfa3aa1-80cb-47cb-ab51-fa1225cd7527 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.080057] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf343b8-2d83-4711-848f-16ccdc860bad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.082963] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 670.082963] env[68233]: value = "task-2781951" [ 670.082963] env[68233]: _type = "Task" [ 670.082963] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.112688] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ae148a-7ed3-41cd-b61b-0fec6d015d93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.119847] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781951, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.124394] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf4b4bc-bc9f-4b38-bc5e-50a762a6d267 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.140365] env[68233]: DEBUG nova.compute.provider_tree [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.355546] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7bb98-ba18-db17-a245-372fe1e1b003, 'name': SearchDatastore_Task, 'duration_secs': 0.019037} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.356311] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a412d4c8-03ca-4d46-8ad9-77ab105a5677 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.362720] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 670.362720] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7a968-433a-9685-10b7-ca17f27afdbc" [ 670.362720] env[68233]: _type = "Task" [ 670.362720] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.371715] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7a968-433a-9685-10b7-ca17f27afdbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.556782] env[68233]: DEBUG nova.compute.manager [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 670.556963] env[68233]: DEBUG nova.compute.manager [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing instance network info cache due to event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 670.557284] env[68233]: DEBUG oslo_concurrency.lockutils [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.557346] env[68233]: DEBUG oslo_concurrency.lockutils [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.557478] env[68233]: DEBUG nova.network.neutron [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 670.593338] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781951, 'name': Rename_Task, 'duration_secs': 0.178612} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.593792] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 670.594189] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b6d344a-5f35-4e39-b460-61492132a052 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.600540] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 670.600540] env[68233]: value = "task-2781952" [ 670.600540] env[68233]: _type = "Task" [ 670.600540] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.608279] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.644226] env[68233]: DEBUG nova.scheduler.client.report [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.873116] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7a968-433a-9685-10b7-ca17f27afdbc, 'name': SearchDatastore_Task, 'duration_secs': 0.023488} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.873380] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.873638] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 769956c6-7824-41db-9779-fc1b5f53dd94/769956c6-7824-41db-9779-fc1b5f53dd94.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.873937] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddc4962c-f32f-4180-aad9-2dd882670d38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.880416] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 670.880416] env[68233]: value = "task-2781953" [ 670.880416] env[68233]: _type = "Task" [ 670.880416] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.893288] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.112812] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781952, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.150401] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.194s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.153267] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.677s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.155050] env[68233]: INFO nova.compute.claims [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 671.167914] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Successfully updated port: 993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 671.186307] env[68233]: INFO nova.scheduler.client.report [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Deleted allocations for instance d19421ad-88d5-4479-a6e4-c6d59e863b31 [ 671.364449] env[68233]: DEBUG nova.network.neutron [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updated VIF entry in instance network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 671.364983] env[68233]: DEBUG nova.network.neutron [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.390500] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781953, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.526214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.526391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.610887] env[68233]: DEBUG oslo_vmware.api [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2781952, 'name': PowerOnVM_Task, 'duration_secs': 0.932964} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.611228] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 671.611412] env[68233]: INFO nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Took 9.10 seconds to spawn the instance on the hypervisor. [ 671.611556] env[68233]: DEBUG nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 671.612341] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e3d427-9793-4af4-a327-1ad965e59e76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.672270] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.672517] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquired lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.672730] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.698908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6df24901-c137-436a-b2b7-a0fa0ef02ccc tempest-ImagesNegativeTestJSON-116409968 tempest-ImagesNegativeTestJSON-116409968-project-member] Lock "d19421ad-88d5-4479-a6e4-c6d59e863b31" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.139s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.813804] env[68233]: INFO nova.compute.manager [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Rescuing [ 671.813804] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.813804] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.813804] env[68233]: DEBUG nova.network.neutron [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.868073] env[68233]: DEBUG oslo_concurrency.lockutils [req-32852904-3356-49c2-aeb3-d222ba55066b req-b59f6b36-0685-41de-a851-d1f30d696350 service nova] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.891601] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781953, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661154} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.891891] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 769956c6-7824-41db-9779-fc1b5f53dd94/769956c6-7824-41db-9779-fc1b5f53dd94.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.892147] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.892443] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0951b01a-3509-4e05-8ad3-4caf803cdbe1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.899318] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 671.899318] env[68233]: value = "task-2781954" [ 671.899318] env[68233]: _type = "Task" [ 671.899318] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.906882] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781954, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.130595] env[68233]: INFO nova.compute.manager [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Took 41.50 seconds to build instance. [ 672.218679] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 672.409070] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781954, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066937} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.409380] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.410167] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129d7bbd-5051-41ea-8039-720b50c7b905 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.435960] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 769956c6-7824-41db-9779-fc1b5f53dd94/769956c6-7824-41db-9779-fc1b5f53dd94.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.443243] env[68233]: DEBUG nova.network.neutron [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Updating instance_info_cache with network_info: [{"id": "993df01a-288b-4332-8976-cf2b951c477a", "address": "fa:16:3e:3c:75:98", "network": {"id": "bdac9594-bd2c-4e15-9447-7ba27e417630", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-315423907-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514e26678c904a209c56a7d55b093df9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap993df01a-28", "ovs_interfaceid": "993df01a-288b-4332-8976-cf2b951c477a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.444415] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d238fa85-8a4e-4c03-a412-cfe86cb6ade5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.468544] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 672.468544] env[68233]: value = "task-2781955" [ 672.468544] env[68233]: _type = "Task" [ 672.468544] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.479929] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781955, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.632968] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f41b815d-110f-41ec-8c36-27aaa633873b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.301s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.741415] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70d7b0b-64d4-4dd4-a9a9-b46d31ed25c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.749311] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926b4ef7-5c06-4be0-9530-ba3416e39a8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.791035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ccd233-8f1a-49a1-a224-45ca360c9859 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.800958] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcae431e-d9e5-4eb3-a5f8-cdc14d929f06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.817212] env[68233]: DEBUG nova.compute.provider_tree [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.895615] env[68233]: DEBUG nova.network.neutron [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 
tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updating instance_info_cache with network_info: [{"id": "a38db034-3553-49b5-afdc-1b75d897f720", "address": "fa:16:3e:c1:7e:7e", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa38db034-35", "ovs_interfaceid": "a38db034-3553-49b5-afdc-1b75d897f720", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.962668] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Releasing lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.963072] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Instance network_info: |[{"id": "993df01a-288b-4332-8976-cf2b951c477a", "address": "fa:16:3e:3c:75:98", "network": {"id": "bdac9594-bd2c-4e15-9447-7ba27e417630", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-315423907-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514e26678c904a209c56a7d55b093df9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993df01a-28", "ovs_interfaceid": "993df01a-288b-4332-8976-cf2b951c477a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.963558] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 
6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:75:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4576b9d4-535c-40aa-b078-246f671f216e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '993df01a-288b-4332-8976-cf2b951c477a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.973777] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Creating folder: Project (514e26678c904a209c56a7d55b093df9). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.974668] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a16695ea-76bc-4fb6-97e7-33995dcd3e8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.987827] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781955, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.990532] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Created folder: Project (514e26678c904a209c56a7d55b093df9) in parent group-v559223. [ 672.990741] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Creating folder: Instances. Parent ref: group-v559301. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 672.990989] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-384ddece-9151-411e-a90b-c920bc2ab29e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.002302] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Created folder: Instances in parent group-v559301. [ 673.002706] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.002940] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 673.002981] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-920f6bce-80a1-4182-a6ee-6c0cabdc4dde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.022322] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 673.022322] env[68233]: value = "task-2781958" [ 673.022322] env[68233]: _type = "Task" [ 673.022322] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.029905] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781958, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.135404] env[68233]: DEBUG nova.compute.manager [None req-4289c980-b115-42c0-bc9a-4e2aac873b49 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: bee2e1c1-5803-419e-9606-24b1d1abcd52] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 673.274806] env[68233]: DEBUG nova.compute.manager [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Received event network-vif-plugged-993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 673.274806] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Acquiring lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.274806] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.274998] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.275225] env[68233]: DEBUG nova.compute.manager [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] No waiting events found dispatching network-vif-plugged-993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 673.275428] env[68233]: WARNING nova.compute.manager [req-bb0fa220-618a-4be8-986d-32e7d64170e9 
req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Received unexpected event network-vif-plugged-993df01a-288b-4332-8976-cf2b951c477a for instance with vm_state building and task_state spawning. [ 673.275623] env[68233]: DEBUG nova.compute.manager [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Received event network-changed-993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 673.275808] env[68233]: DEBUG nova.compute.manager [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Refreshing instance network info cache due to event network-changed-993df01a-288b-4332-8976-cf2b951c477a. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 673.276044] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Acquiring lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.276222] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Acquired lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.276442] env[68233]: DEBUG nova.network.neutron [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Refreshing network info cache for port 993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 673.320199] env[68233]: DEBUG nova.scheduler.client.report [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.398224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.484543] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781955, 'name': ReconfigVM_Task, 'duration_secs': 0.882607} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.484915] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 769956c6-7824-41db-9779-fc1b5f53dd94/769956c6-7824-41db-9779-fc1b5f53dd94.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.485465] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a500d991-e25e-4b12-ba34-96d713154746 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.491989] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 673.491989] env[68233]: value = "task-2781959" [ 673.491989] env[68233]: _type = "Task" [ 673.491989] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.504158] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781959, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.535228] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781958, 'name': CreateVM_Task, 'duration_secs': 0.41368} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.535480] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.536408] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.536674] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.537061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.537380] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c49ba21-9559-4942-8a26-6129b1808cf6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.543958] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 673.543958] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d7b72-fcd3-9bd5-1ac1-be62511bb574" [ 673.543958] env[68233]: _type = "Task" [ 673.543958] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.554991] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d7b72-fcd3-9bd5-1ac1-be62511bb574, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.641576] env[68233]: DEBUG nova.compute.manager [None req-4289c980-b115-42c0-bc9a-4e2aac873b49 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: bee2e1c1-5803-419e-9606-24b1d1abcd52] Instance disappeared before build. 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 673.833018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.679s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 673.833018] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 673.836837] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.103s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.838444] env[68233]: INFO nova.compute.claims [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 673.937720] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "abdf9de2-8563-4a31-91a3-0c18b0387533" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.937996] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.003066] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781959, 'name': Rename_Task, 'duration_secs': 0.208055} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.003857] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.004380] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afd173d5-eb33-4538-8989-fb07ca6b540d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.015178] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 674.015178] env[68233]: value = "task-2781960" [ 674.015178] env[68233]: _type = "Task" [ 674.015178] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.023454] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781960, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.053833] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d7b72-fcd3-9bd5-1ac1-be62511bb574, 'name': SearchDatastore_Task, 'duration_secs': 0.013706} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.054146] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.054396] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 674.054651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.054804] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 674.054991] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 674.055295] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17d90bae-3053-44c7-b18d-417018e5a056 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.063924] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 674.064147] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 674.064862] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71d5350c-0de0-4e2d-a105-ef2744f79c77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.072196] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 674.072196] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522bc09b-311b-6ac7-07a3-76a822ac9751" [ 674.072196] env[68233]: _type = "Task" [ 674.072196] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.078115] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522bc09b-311b-6ac7-07a3-76a822ac9751, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.130562] env[68233]: DEBUG nova.network.neutron [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Updated VIF entry in instance network info cache for port 993df01a-288b-4332-8976-cf2b951c477a. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 674.130938] env[68233]: DEBUG nova.network.neutron [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Updating instance_info_cache with network_info: [{"id": "993df01a-288b-4332-8976-cf2b951c477a", "address": "fa:16:3e:3c:75:98", "network": {"id": "bdac9594-bd2c-4e15-9447-7ba27e417630", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-315423907-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "514e26678c904a209c56a7d55b093df9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4576b9d4-535c-40aa-b078-246f671f216e", "external-id": "nsx-vlan-transportzone-27", "segmentation_id": 27, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993df01a-28", "ovs_interfaceid": "993df01a-288b-4332-8976-cf2b951c477a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.156405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4289c980-b115-42c0-bc9a-4e2aac873b49 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "bee2e1c1-5803-419e-9606-24b1d1abcd52" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.039s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.345048] env[68233]: DEBUG nova.compute.utils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 674.347171] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 674.347392] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 674.384834] env[68233]: DEBUG nova.policy [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ff2dd2bd88e454495b02259b5728894', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c98b110be96f495ab5ef126a45b8328e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 674.526606] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781960, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.583692] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522bc09b-311b-6ac7-07a3-76a822ac9751, 'name': SearchDatastore_Task, 'duration_secs': 0.008493} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.584717] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-702a02f2-f6f8-44d8-8d05-5e232960f250 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.591162] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 674.591162] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523640a9-371b-b5ac-ef45-bada1d98d8fb" [ 674.591162] env[68233]: _type = "Task" [ 674.591162] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.603244] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523640a9-371b-b5ac-ef45-bada1d98d8fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.633861] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb0fa220-618a-4be8-986d-32e7d64170e9 req-06c9f2ee-d5dc-4263-abc2-942dcb913d7b service nova] Releasing lock "refresh_cache-6ae76b0f-7df2-4652-b4c3-92c16ed487a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.658788] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 674.850767] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 674.869531] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Successfully created port: 436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 674.955148] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.955148] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-39e34bff-479b-46c7-8aea-f9bb6a521f90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.962080] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 674.962080] env[68233]: value = "task-2781961" [ 674.962080] env[68233]: _type = "Task" [ 674.962080] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.972978] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.032879] env[68233]: DEBUG oslo_vmware.api [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781960, 'name': PowerOnVM_Task, 'duration_secs': 0.689404} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.033153] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.033358] env[68233]: INFO nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 9.43 seconds to spawn the instance on the hypervisor. [ 675.033554] env[68233]: DEBUG nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.034410] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d0904f-de6e-4797-9ae7-35e5c9d359cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.107486] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523640a9-371b-b5ac-ef45-bada1d98d8fb, 'name': SearchDatastore_Task, 'duration_secs': 0.009938} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.110414] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 675.110773] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6ae76b0f-7df2-4652-b4c3-92c16ed487a1/6ae76b0f-7df2-4652-b4c3-92c16ed487a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 675.111242] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac98b2fd-7cab-4b6b-9af3-b16638cf1e79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.119581] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 675.119581] env[68233]: value = "task-2781962" [ 675.119581] env[68233]: _type = "Task" [ 675.119581] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.132681] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.186995] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.474230] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781961, 'name': PowerOffVM_Task, 'duration_secs': 0.229955} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.477519] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 675.478666] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5240733-aa3d-48c8-9032-bf6fea5a55bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.505457] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94776fad-016e-434d-a15c-29bff103e2ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.510230] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d439a12b-df5f-4a08-b9da-b6419685e236 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.527199] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd33a94-01f2-4890-b442-a6ba80849297 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.570136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c54a0e6-24c2-44ac-8a4d-0e2fc531fe3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.574720] env[68233]: INFO nova.compute.manager [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 44.54 seconds to build instance. 
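The recurring "Invoking <X>_Task", "Waiting for the task", "progress is N%" and "completed successfully" lines above are the standard oslo.vmware invoke-and-poll pattern. A minimal sketch of that pattern follows, assuming a reachable vCenter; the connection values and the VM reference are placeholders for illustration, not values or code taken from this log.

# Sketch only: the invoke/poll pattern visible in the log, not Nova's code.
from oslo_vmware import api as vmware_api


def power_on_vm(session, vm_ref):
    """Invoke PowerOnVM_Task and block until vCenter reports completion.

    vm_ref is a VirtualMachine managed object reference obtained elsewhere
    (placeholder in this sketch).
    """
    # Corresponds to "Invoking VirtualMachine.PowerOnVM_Task with opID=..."
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() is what emits the "Waiting for the task ...",
    # "progress is N%" and final "completed successfully" poll lines.
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder connection details, not the vCenter used in this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.invalid', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    # power_on_vm(session, vm_ref) would then be called with a real VM ref.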
[ 675.581228] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5654731-5dac-4476-b5d8-ee3faa91d13b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.591264] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 675.591264] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc06e0d4-1865-4b46-9298-eb9f642d9f56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.600807] env[68233]: DEBUG nova.compute.provider_tree [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.604269] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 675.604269] env[68233]: value = "task-2781963" [ 675.604269] env[68233]: _type = "Task" [ 675.604269] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.614403] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 675.614403] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 675.614676] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.614676] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 675.614768] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 675.615468] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a222023-7196-4c79-aa5e-25869cedb74f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.625318] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 675.625466] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 675.627023] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe8c1ca5-09ab-4619-9064-f460e58b5e71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.633429] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503547} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.633483] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6ae76b0f-7df2-4652-b4c3-92c16ed487a1/6ae76b0f-7df2-4652-b4c3-92c16ed487a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.633682] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.634312] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d505502f-9b18-4958-bdcc-409aa13051dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.637369] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 675.637369] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527a763f-d8fb-2252-592e-c6be77397309" [ 675.637369] env[68233]: _type = "Task" [ 675.637369] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.642131] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 675.642131] env[68233]: value = "task-2781964" [ 675.642131] env[68233]: _type = "Task" [ 675.642131] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.648300] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527a763f-d8fb-2252-592e-c6be77397309, 'name': SearchDatastore_Task, 'duration_secs': 0.009146} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.649788] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4991e3c-9feb-4047-9049-013b83722e4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.654940] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781964, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.657775] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 675.657775] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528288df-4b59-0bb8-68ae-2e2ff5192c95" [ 675.657775] env[68233]: _type = "Task" [ 675.657775] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.665807] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528288df-4b59-0bb8-68ae-2e2ff5192c95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.878584] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 675.903295] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 675.903547] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.903704] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 675.903886] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.904046] env[68233]: DEBUG nova.virt.hardware [None 
req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 675.904197] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 675.904408] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 675.904568] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 675.904738] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 675.904901] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 675.905109] env[68233]: DEBUG nova.virt.hardware [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 675.905960] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da38a9d-c209-4de9-a0bf-6352f05423de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.914349] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238d9e08-518f-43ab-9fec-e3ac84e149f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.076515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-766ae357-2a39-40a6-8eb9-27ef4769c059 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.394s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.105595] env[68233]: DEBUG nova.scheduler.client.report [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f 
tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.152955] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781964, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074696} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.153315] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.155904] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96105e96-6fd2-43de-b18a-605e2c8564e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.179744] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] 6ae76b0f-7df2-4652-b4c3-92c16ed487a1/6ae76b0f-7df2-4652-b4c3-92c16ed487a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.180543] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d38e3598-9e27-41e2-9bc6-60d11ca62c06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.199473] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528288df-4b59-0bb8-68ae-2e2ff5192c95, 'name': SearchDatastore_Task, 'duration_secs': 0.011645} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.200207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 676.200590] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. {{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 676.200976] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c142a6df-9c2c-4941-8000-4b1a1201d459 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.204933] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 676.204933] env[68233]: value = "task-2781965" [ 676.204933] env[68233]: _type = "Task" [ 676.204933] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.209443] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 676.209443] env[68233]: value = "task-2781966" [ 676.209443] env[68233]: _type = "Task" [ 676.209443] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.216418] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781965, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.221373] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781966, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.579250] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 676.611948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.612952] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.618593] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.588s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.621344] env[68233]: INFO nova.compute.claims [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.718386] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781965, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.726996] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781966, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496295} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.727482] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. 
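The "Acquiring lock", "Acquired lock" and "Releasing lock" triples around the [datastore2] devstack-image-cache_base paths above come from oslo.concurrency's lock helper (the lockutils.py:313/316/334 trailers). A hedged sketch of that pattern, with a placeholder lock name and copy helper rather than Nova's actual code:

from oslo_concurrency import lockutils

# Placeholder lock name patterned after the datastore paths in the log.
CACHE_VMDK = '[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk'


def copy_from_image_cache(copy_fn):
    # lockutils.lock() is a context manager: entering it logs the
    # "Acquiring lock ..." / "Acquired lock ..." lines and leaving it logs
    # "Releasing lock ...", serializing builds that share one cached image.
    with lockutils.lock(CACHE_VMDK):
        copy_fn()  # e.g. the CopyVirtualDisk_Task invocation shown above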
[ 676.728372] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905a19b2-b0e3-43ea-a62f-292119688c02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.759676] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.760403] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8714a22-2151-4b9d-9063-a0326d8aa388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.779600] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 676.779600] env[68233]: value = "task-2781967" [ 676.779600] env[68233]: _type = "Task" [ 676.779600] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.788973] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781967, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.916176] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Successfully updated port: 436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.109890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.134629] env[68233]: DEBUG nova.compute.utils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 677.135309] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.135475] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.166955] env[68233]: DEBUG nova.compute.manager [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Received event network-vif-plugged-436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 677.167239] env[68233]: DEBUG oslo_concurrency.lockutils [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] Acquiring lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.167845] env[68233]: DEBUG oslo_concurrency.lockutils [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.170766] env[68233]: DEBUG oslo_concurrency.lockutils [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.170766] env[68233]: DEBUG nova.compute.manager [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] No waiting events found dispatching network-vif-plugged-436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 677.170766] env[68233]: WARNING nova.compute.manager [req-6cb9a61f-ebad-45a0-9d6e-a50ce398a64b req-fee7bfb5-e29f-4fa3-b049-aac8e07f0ba2 service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Received unexpected event network-vif-plugged-436ed418-46de-465b-920d-6fddf7ec041d for instance with vm_state building and task_state spawning. 
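The "Received event network-vif-plugged-..." handling above is nova-compute processing an external instance event that the network service delivers through Nova's os-server-external-events API; the warning only notes that nothing was waiting for the event yet. A rough sketch of that delivery, with placeholder endpoint, token and UUIDs:

import requests

# Placeholders only; none of these values come from this log.
NOVA_API = 'http://controller.example.invalid/compute/v2.1'
TOKEN = '<keystone-token>'

payload = {
    'events': [{
        'name': 'network-vif-plugged',
        'server_uuid': '<instance-uuid>',  # instance whose port was plugged
        'tag': '<port-uuid>',              # the Neutron port ID
        'status': 'completed',
    }]
}

# POST /os-server-external-events is the call that ends up as the
# "Received event network-vif-plugged-..." lines in nova-compute's log.
resp = requests.post(NOVA_API + '/os-server-external-events',
                     json=payload,
                     headers={'X-Auth-Token': TOKEN})
resp.raise_for_status()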
[ 677.197088] env[68233]: DEBUG nova.policy [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e030e37f11497fbf606a73dac8b586', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c687afa1e6a546ce8da85e20638c816b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 677.215607] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781965, 'name': ReconfigVM_Task, 'duration_secs': 0.58953} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.215892] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Reconfigured VM instance instance-0000001a to attach disk [datastore2] 6ae76b0f-7df2-4652-b4c3-92c16ed487a1/6ae76b0f-7df2-4652-b4c3-92c16ed487a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.217669] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aabca6aa-b255-4e91-a77d-a34b6bf10122 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.224571] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 677.224571] env[68233]: value = "task-2781968" [ 677.224571] env[68233]: _type = "Task" [ 677.224571] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.233581] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781968, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.294639] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781967, 'name': ReconfigVM_Task, 'duration_secs': 0.364415} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.295454] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 87385201-3118-4a8e-9739-db3b431566c5/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.296600] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92c59d9-c634-454f-9405-3947d4b4bbf9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.327517] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9de370a6-2863-44b9-bac9-69ded1eb7c8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.344080] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 677.344080] env[68233]: value = "task-2781969" [ 677.344080] env[68233]: _type = "Task" [ 677.344080] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.354255] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781969, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.388020] env[68233]: DEBUG nova.compute.manager [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.388020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0ff9e1-8339-4e5a-af9c-c5d8d0dc623e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.417877] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.418049] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquired lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.418207] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.636782] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.737561] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781968, 'name': Rename_Task, 'duration_secs': 0.155146} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.737850] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.738119] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd3333e0-e619-4007-9207-2fd603a6ad9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.749367] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 677.749367] env[68233]: value = "task-2781970" [ 677.749367] env[68233]: _type = "Task" [ 677.749367] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.753919] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Successfully created port: 786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.762546] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.855263] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781969, 'name': ReconfigVM_Task, 'duration_secs': 0.16437} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.858606] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 677.858942] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-959ff4d4-ebb4-4f67-b0ec-c07497aa9d7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.867555] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 677.867555] env[68233]: value = "task-2781971" [ 677.867555] env[68233]: _type = "Task" [ 677.867555] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.877193] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781971, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.900583] env[68233]: INFO nova.compute.manager [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] instance snapshotting [ 677.907897] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951a1ec5-56eb-4470-9123-1349af07a0b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.939921] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a4abf0-79ad-4b1f-88fe-e7f8b90a1ec2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.986699] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.173259] env[68233]: DEBUG nova.network.neutron [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Updating instance_info_cache with network_info: [{"id": "436ed418-46de-465b-920d-6fddf7ec041d", "address": "fa:16:3e:b3:33:82", "network": {"id": "7cfaf07f-a0d1-4eb5-9524-24a3604eeaa7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-614869909-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c98b110be96f495ab5ef126a45b8328e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d062877-0b23-4965-908b-f585f25f3bf1", "external-id": "nsx-vlan-transportzone-523", "segmentation_id": 523, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap436ed418-46", "ovs_interfaceid": "436ed418-46de-465b-920d-6fddf7ec041d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.268333] env[68233]: DEBUG oslo_vmware.api [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781970, 'name': PowerOnVM_Task, 'duration_secs': 0.465507} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.268333] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.268333] env[68233]: INFO nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Took 8.44 seconds to spawn the instance on the hypervisor. [ 678.268505] env[68233]: DEBUG nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.269403] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ebbada-2759-445e-b336-35bdd8887dc8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.319165] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9246fcf4-32b5-44ec-a7ce-9e3f365c435a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.329863] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4891be4-abcc-46be-b878-3e0967314680 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.362955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b1e38b-2abe-4da2-8538-c76ff72f87dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.375541] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a63ff3-4e7d-44cc-8a27-94a9a18faf9f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.385411] env[68233]: DEBUG oslo_vmware.api [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781971, 'name': PowerOnVM_Task, 'duration_secs': 0.428656} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.386191] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.395701] env[68233]: DEBUG nova.compute.provider_tree [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.399732] env[68233]: DEBUG nova.compute.manager [None req-9dc56e92-6603-483e-97b4-51ad4d964d03 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.399732] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5bea51-1f3a-450a-853b-9421132b27c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.452978] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 678.453335] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a7eeb190-5bf1-46f2-9fc2-761264dbd188 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.460867] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 678.460867] env[68233]: value = "task-2781972" [ 678.460867] env[68233]: _type = "Task" [ 678.460867] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.469524] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781972, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.545010] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.545318] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.651601] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 678.671621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Releasing lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 678.672247] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Instance network_info: |[{"id": "436ed418-46de-465b-920d-6fddf7ec041d", "address": "fa:16:3e:b3:33:82", "network": {"id": "7cfaf07f-a0d1-4eb5-9524-24a3604eeaa7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-614869909-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c98b110be96f495ab5ef126a45b8328e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d062877-0b23-4965-908b-f585f25f3bf1", "external-id": "nsx-vlan-transportzone-523", "segmentation_id": 523, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap436ed418-46", "ovs_interfaceid": "436ed418-46de-465b-920d-6fddf7ec041d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 678.674606] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 
tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:33:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d062877-0b23-4965-908b-f585f25f3bf1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '436ed418-46de-465b-920d-6fddf7ec041d', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.682647] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Creating folder: Project (c98b110be96f495ab5ef126a45b8328e). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.683551] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f216fc26-5616-48cc-931c-e2217dc440a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.692925] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 678.693320] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.693387] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 678.693519] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.693663] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 678.693812] 
env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 678.694076] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 678.694264] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 678.694437] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 678.694604] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 678.694781] env[68233]: DEBUG nova.virt.hardware [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 678.696191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90096ab4-0597-4482-9042-926b32bfe6ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.700443] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Created folder: Project (c98b110be96f495ab5ef126a45b8328e) in parent group-v559223. [ 678.700629] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Creating folder: Instances. Parent ref: group-v559304. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 678.701351] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7901b3d-6071-4730-96d3-d56c0c9101a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.708078] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d279a2-bca6-474d-9347-dee568fe985b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.713916] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Created folder: Instances in parent group-v559304. [ 678.714177] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.721693] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.722426] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f56492c-c5d6-4a4d-8530-226d32d54f69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.746588] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.746588] env[68233]: value = "task-2781975" [ 678.746588] env[68233]: _type = "Task" [ 678.746588] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.753548] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781975, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.791602] env[68233]: INFO nova.compute.manager [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Took 45.39 seconds to build instance. 
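The recurring 'Waiting for the task: (returnval){ value = "task-..." } to complete', 'progress is N%.' and 'completed successfully' entries above are all emitted by one polling loop around vCenter tasks. The snippet below is a stripped-down sketch of that loop using only the standard library; the poll function is a toy stand-in for the real vSphere API call and is assumed to return a dict with 'state' and 'progress' keys.

import time

def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
    # Poll until the task reports success or error, logging progress along the way.
    start = time.monotonic()
    while True:
        info = poll_fn()
        if info["state"] == "success":
            return {"duration_secs": round(time.monotonic() - start, 6)}
        if info["state"] == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print("progress is %d%%." % info.get("progress", 0))
        if time.monotonic() - start > timeout:
            raise TimeoutError("task did not complete in time")
        time.sleep(interval)

# Toy usage: a fake task that completes on the third poll.
_states = iter([{"state": "running", "progress": 0},
                {"state": "running", "progress": 50},
                {"state": "success"}])
print(wait_for_task(lambda: next(_states), interval=0.01))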
[ 678.902923] env[68233]: DEBUG nova.scheduler.client.report [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.971371] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781972, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.213151] env[68233]: DEBUG nova.compute.manager [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Received event network-changed-436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 679.213351] env[68233]: DEBUG nova.compute.manager [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Refreshing instance network info cache due to event network-changed-436ed418-46de-465b-920d-6fddf7ec041d. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 679.213894] env[68233]: DEBUG oslo_concurrency.lockutils [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] Acquiring lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.214215] env[68233]: DEBUG oslo_concurrency.lockutils [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] Acquired lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.214417] env[68233]: DEBUG nova.network.neutron [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Refreshing network info cache for port 436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 679.256686] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781975, 'name': CreateVM_Task, 'duration_secs': 0.333403} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.256880] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.258059] env[68233]: DEBUG oslo_vmware.service [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7727fb-6317-450e-a1e8-81d494ff1116 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.265448] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.265727] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.266221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.266487] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9839d24-d48a-40a8-b220-42497bf16436 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.270763] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 679.270763] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52607577-4947-9f80-22a6-e00f03b4d075" [ 679.270763] env[68233]: _type = "Task" [ 679.270763] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.278520] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52607577-4947-9f80-22a6-e00f03b4d075, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.293904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-95d80a37-5b6f-4057-b3df-ceafd1f35ffc tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.842s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.303259] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.303597] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.303876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.304836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.304836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.307325] env[68233]: INFO nova.compute.manager [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Terminating instance [ 679.407426] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.789s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.408083] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 679.410803] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.409s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.412445] env[68233]: DEBUG nova.objects.instance [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lazy-loading 'resources' on Instance uuid 90d88fcb-6141-499c-b049-ddfc9e210d5c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.477454] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781972, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.676410] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Successfully updated port: 786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.781179] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.781432] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.781677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.781829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.782067] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.782294] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ddb147c-db90-494d-9df5-9781bf40a4cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.790867] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.791057] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.791823] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c989b8b7-676e-419c-a73f-da1a0c6c83c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.795979] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 679.802036] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34501da9-cf36-4fc1-9159-6ac8adcc5ddf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.809341] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 679.809341] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fc6d17-ca26-e93d-76af-3626a88e727c" [ 679.809341] env[68233]: _type = "Task" [ 679.809341] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.813670] env[68233]: DEBUG nova.compute.manager [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 679.813883] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 679.814188] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fc6d17-ca26-e93d-76af-3626a88e727c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.814898] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f3a9a9-94ed-4bc3-986d-5e18940a9f64 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.821778] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 679.822082] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8d09691-165c-4d3d-bec5-1657ff894dc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.827717] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 679.827717] env[68233]: value = "task-2781976" [ 679.827717] env[68233]: _type = "Task" [ 679.827717] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.835517] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781976, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.916593] env[68233]: DEBUG nova.compute.utils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 679.918461] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 679.918461] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.973292] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781972, 'name': CreateSnapshot_Task, 'duration_secs': 1.128962} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.973824] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 679.975143] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d11176-9960-4c3c-9ab0-bcce6830e0cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.997247] env[68233]: DEBUG nova.policy [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba723ede802e47adad4d18cef7f5da88', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '548b84c71d3d4c2e9f13912fb8ac0e90', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 680.107392] env[68233]: INFO nova.compute.manager [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Unrescuing [ 680.107827] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.108114] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.108793] env[68233]: DEBUG nova.network.neutron [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 
87385201-3118-4a8e-9739-db3b431566c5] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.178260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.181020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquired lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.181020] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.237941] env[68233]: DEBUG nova.network.neutron [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Updated VIF entry in instance network info cache for port 436ed418-46de-465b-920d-6fddf7ec041d. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 680.238325] env[68233]: DEBUG nova.network.neutron [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Updating instance_info_cache with network_info: [{"id": "436ed418-46de-465b-920d-6fddf7ec041d", "address": "fa:16:3e:b3:33:82", "network": {"id": "7cfaf07f-a0d1-4eb5-9524-24a3604eeaa7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-614869909-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c98b110be96f495ab5ef126a45b8328e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d062877-0b23-4965-908b-f585f25f3bf1", "external-id": "nsx-vlan-transportzone-523", "segmentation_id": 523, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap436ed418-46", "ovs_interfaceid": "436ed418-46de-465b-920d-6fddf7ec041d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.323819] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Preparing fetch location {{(pid=68233) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 680.324067] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Creating directory with path [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 680.325095] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f9cfa95-7736-4188-9883-db5985e7db8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.327967] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.337024] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781976, 'name': PowerOffVM_Task, 'duration_secs': 0.19404} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.337024] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 680.337542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 680.337872] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fac5ab8-35d5-4af0-966b-ca3d53523933 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.340434] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Created directory with path [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 680.340619] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Fetch image to [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 680.340810] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 
tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Downloading image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk on the data store datastore1 {{(pid=68233) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 680.341870] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b8d25-8a5f-4c46-a498-f75a8ac6b79c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.353515] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932212af-0f16-4e25-8a10-414fea5b2948 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.366957] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Successfully created port: 489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.370280] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d71a7f5-bc20-4085-a9d3-86aae5e60712 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.412861] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e73b6e-1f0f-4145-827b-2c016812c616 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.415645] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 680.415854] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 680.416051] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Deleting the datastore file [datastore2] 6ae76b0f-7df2-4652-b4c3-92c16ed487a1 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 680.416621] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-69376a90-1df1-4026-8a11-e9135523b51f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.423554] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Start building block 
device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 680.429785] env[68233]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c2938cc4-1388-48b2-865a-db1f147f31e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.430527] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for the task: (returnval){ [ 680.430527] env[68233]: value = "task-2781978" [ 680.430527] env[68233]: _type = "Task" [ 680.430527] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.439045] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781978, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.497159] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 680.500941] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e4f2a1b7-63a6-418c-925e-7846fe448f43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.509528] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 680.509528] env[68233]: value = "task-2781979" [ 680.509528] env[68233]: _type = "Task" [ 680.509528] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.520758] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781979, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.526590] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Downloading image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to the data store datastore1 {{(pid=68233) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 680.557392] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f090996b-0145-41f5-95d6-4ae035cd3b8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.564232] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db67ae97-1a99-4ec9-9344-b23622c81c85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.598795] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cf3ab3-cc10-45d5-9815-de171c527b59 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.607379] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38095a0-82de-4953-a3d8-16c789c20499 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.624425] env[68233]: DEBUG nova.compute.provider_tree [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.627528] env[68233]: DEBUG oslo_vmware.rw_handles [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 680.730846] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.741465] env[68233]: DEBUG oslo_concurrency.lockutils [req-932551d8-1f26-4719-8f28-fb7d98f439ae req-5ce855fe-4d26-490e-98a1-ae0472663f8e service nova] Releasing lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 680.900772] env[68233]: DEBUG nova.network.neutron [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Updating instance_info_cache with network_info: [{"id": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "address": "fa:16:3e:c9:22:3b", "network": {"id": "4ab8970c-ca69-4aa3-a3de-9bb927e59a0e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-44374739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c687afa1e6a546ce8da85e20638c816b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap786586a9-c1", "ovs_interfaceid": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.953390] env[68233]: DEBUG oslo_vmware.api [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Task: {'id': task-2781978, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18163} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.956541] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 680.956782] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 680.957038] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 680.957280] env[68233]: INFO nova.compute.manager [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 680.957926] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 680.957926] env[68233]: DEBUG nova.compute.manager [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 680.957926] env[68233]: DEBUG nova.network.neutron [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.021047] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781979, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.030347] env[68233]: DEBUG nova.network.neutron [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updating instance_info_cache with network_info: [{"id": "a38db034-3553-49b5-afdc-1b75d897f720", "address": "fa:16:3e:c1:7e:7e", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa38db034-35", "ovs_interfaceid": "a38db034-3553-49b5-afdc-1b75d897f720", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.129760] env[68233]: DEBUG nova.scheduler.client.report [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 681.296333] env[68233]: DEBUG nova.compute.manager [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Received event network-vif-plugged-786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.296638] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Acquiring lock "88d67405-b8c6-484a-b178-68a8babb3708-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.296801] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Lock "88d67405-b8c6-484a-b178-68a8babb3708-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.297007] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Lock "88d67405-b8c6-484a-b178-68a8babb3708-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.297244] env[68233]: DEBUG nova.compute.manager [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] No waiting events found dispatching network-vif-plugged-786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 681.297428] env[68233]: WARNING nova.compute.manager [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Received unexpected event network-vif-plugged-786586a9-c15b-4009-9c4b-ce3b65a85ea1 for instance with vm_state building and task_state spawning. [ 681.297624] env[68233]: DEBUG nova.compute.manager [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Received event network-changed-786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.297744] env[68233]: DEBUG nova.compute.manager [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Refreshing instance network info cache due to event network-changed-786586a9-c15b-4009-9c4b-ce3b65a85ea1. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 681.297916] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Acquiring lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.313709] env[68233]: DEBUG nova.compute.manager [req-0e488857-a5ff-44b4-9f73-0c3f6373e9bb req-ee80c82c-c98d-4f4f-98ec-6125b0d73b5d service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Received event network-vif-deleted-993df01a-288b-4332-8976-cf2b951c477a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 681.313917] env[68233]: INFO nova.compute.manager [req-0e488857-a5ff-44b4-9f73-0c3f6373e9bb req-ee80c82c-c98d-4f4f-98ec-6125b0d73b5d service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Neutron deleted interface 993df01a-288b-4332-8976-cf2b951c477a; detaching it from the instance and deleting it from the info cache [ 681.314106] env[68233]: DEBUG nova.network.neutron [req-0e488857-a5ff-44b4-9f73-0c3f6373e9bb req-ee80c82c-c98d-4f4f-98ec-6125b0d73b5d service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.336054] env[68233]: DEBUG oslo_vmware.rw_handles [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Completed reading data from the image iterator. {{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 681.336269] env[68233]: DEBUG oslo_vmware.rw_handles [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 681.403458] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Releasing lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.403829] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Instance network_info: |[{"id": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "address": "fa:16:3e:c9:22:3b", "network": {"id": "4ab8970c-ca69-4aa3-a3de-9bb927e59a0e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-44374739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c687afa1e6a546ce8da85e20638c816b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap786586a9-c1", "ovs_interfaceid": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 681.404144] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Acquired lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.404327] env[68233]: DEBUG nova.network.neutron [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Refreshing network info cache for port 786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.405560] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:22:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7908211b-df93-467b-87a8-3c3d29b03de6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '786586a9-c15b-4009-9c4b-ce3b65a85ea1', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.413449] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f 
tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Creating folder: Project (c687afa1e6a546ce8da85e20638c816b). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.414375] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ad34814-ffe4-4a40-ac11-7e71384867f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.427783] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Created folder: Project (c687afa1e6a546ce8da85e20638c816b) in parent group-v559223. [ 681.427982] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Creating folder: Instances. Parent ref: group-v559309. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.428235] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-badd45d9-75ce-4e7b-9947-a8c9d61440b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.436786] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 681.440015] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Created folder: Instances in parent group-v559309. [ 681.440198] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.440378] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 681.440579] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d887353d-7007-4cfc-9752-53edca80f103 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.455707] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Downloaded image file data da133fda-e1e2-42a1-a7e0-b8b1426a8490 to vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk on the data store datastore1 {{(pid=68233) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 681.457660] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 681.457912] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Copying Virtual Disk [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk to [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.458979] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41ec2a76-4f80-4d84-b2cc-43118669ec5e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.466965] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 681.466965] env[68233]: value = "task-2781983" [ 681.466965] env[68233]: _type = "Task" [ 681.466965] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.469851] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 681.470094] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.470253] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 681.470438] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.470581] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 681.470731] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 681.470976] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 681.471182] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 681.471377] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 681.471555] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 681.471756] env[68233]: DEBUG nova.virt.hardware [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 681.472199] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 681.472199] env[68233]: value = "task-2781982" [ 681.472199] env[68233]: _type = "Task" [ 681.472199] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.472904] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aea7a1e-a28b-42c4-9125-b6b700413c8f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.491435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa851f4-c54e-4b41-8086-87cf74397875 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.495070] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781983, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.495334] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.518530] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781979, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.533401] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-87385201-3118-4a8e-9739-db3b431566c5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.534116] env[68233]: DEBUG nova.objects.instance [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'flavor' on Instance uuid 87385201-3118-4a8e-9739-db3b431566c5 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 681.636133] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.225s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.638741] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 30.292s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.638928] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.639090] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 681.639470] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.641112] env[68233]: INFO nova.compute.claims [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.644451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ff551b-531c-4017-b098-479a16e14fef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.654037] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56e61ae-35b3-45da-85f4-3b51691eeffd {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.674423] env[68233]: INFO nova.scheduler.client.report [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleted allocations for instance 90d88fcb-6141-499c-b049-ddfc9e210d5c [ 681.674901] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae16a2f-79c2-4483-a778-01a0336517c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.686019] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097e5f62-132b-465f-b06a-4cfa09c9c16e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.717890] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179878MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 681.717972] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.750550] env[68233]: DEBUG nova.network.neutron [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.816561] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f65c192-1c90-4fc3-8e5f-17219b600837 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.826303] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f06e45-3d56-4772-83b0-1766764c6dae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.858838] env[68233]: DEBUG nova.compute.manager [req-0e488857-a5ff-44b4-9f73-0c3f6373e9bb req-ee80c82c-c98d-4f4f-98ec-6125b0d73b5d service nova] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Detach interface failed, port_id=993df01a-288b-4332-8976-cf2b951c477a, reason: Instance 6ae76b0f-7df2-4652-b4c3-92c16ed487a1 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 681.982272] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781983, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.990554] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781982, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.022457] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781979, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.025014] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Successfully updated port: 489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 682.045885] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7519b58-bd72-4484-89d7-9ce7d603c8df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.071193] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 682.071563] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9819e0f6-19ab-4627-a9b2-60cb84c14304 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.080540] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 682.080540] env[68233]: value = "task-2781984" [ 682.080540] env[68233]: _type = "Task" [ 682.080540] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.091483] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781984, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.184105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab80a9ab-3b7b-47b5-accf-e946aec02956 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "90d88fcb-6141-499c-b049-ddfc9e210d5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.913s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.202962] env[68233]: DEBUG nova.network.neutron [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Updated VIF entry in instance network info cache for port 786586a9-c15b-4009-9c4b-ce3b65a85ea1. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.203130] env[68233]: DEBUG nova.network.neutron [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Updating instance_info_cache with network_info: [{"id": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "address": "fa:16:3e:c9:22:3b", "network": {"id": "4ab8970c-ca69-4aa3-a3de-9bb927e59a0e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-44374739-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c687afa1e6a546ce8da85e20638c816b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7908211b-df93-467b-87a8-3c3d29b03de6", "external-id": "nsx-vlan-transportzone-632", "segmentation_id": 632, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap786586a9-c1", "ovs_interfaceid": "786586a9-c15b-4009-9c4b-ce3b65a85ea1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.254369] env[68233]: INFO nova.compute.manager [-] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Took 1.30 seconds to deallocate network for instance. [ 682.482274] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781983, 'name': CreateVM_Task, 'duration_secs': 0.852175} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.485262] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 682.486071] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.486729] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.486729] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 682.487320] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745920ad-424b-4532-bc51-a53f2cdc4f3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.492110] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695422} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.492698] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Copied Virtual Disk [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk to [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 682.492907] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleting the datastore file [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490/tmp-sparse.vmdk {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 682.493125] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-312d43b5-44c2-4cf7-b4cc-3e74f2e950cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.495799] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 682.495799] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c435c5-6423-19f7-77b5-e3bd3fbe2741" [ 682.495799] env[68233]: _type = "Task" [ 682.495799] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.500342] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 682.500342] env[68233]: value = "task-2781985" [ 682.500342] env[68233]: _type = "Task" [ 682.500342] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.503288] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c435c5-6423-19f7-77b5-e3bd3fbe2741, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.511832] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.519395] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2781979, 'name': CloneVM_Task, 'duration_secs': 1.626853} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.519623] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Created linked-clone VM from snapshot [ 682.520435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02550a88-ce0a-47fb-a22a-81109fbbb1cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.528025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.528025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquired lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.528180] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.529459] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Uploading image ad54b4da-5373-4eb1-bcc7-b5685fe04812 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 682.554536] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 682.554536] env[68233]: value = "vm-559308" [ 682.554536] env[68233]: _type = "VirtualMachine" [ 682.554536] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 682.554820] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-251cf52e-ebf9-4613-84c7-519eb66fd342 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.562743] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lease: (returnval){ [ 682.562743] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528de215-ce78-c324-d310-d5da9e4f37df" [ 682.562743] env[68233]: _type = "HttpNfcLease" [ 682.562743] env[68233]: } obtained for exporting VM: (result){ [ 682.562743] env[68233]: value = "vm-559308" [ 682.562743] env[68233]: _type = "VirtualMachine" [ 682.562743] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 682.563021] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the lease: (returnval){ [ 682.563021] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528de215-ce78-c324-d310-d5da9e4f37df" [ 682.563021] env[68233]: _type = "HttpNfcLease" [ 682.563021] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 682.571665] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 682.571665] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528de215-ce78-c324-d310-d5da9e4f37df" [ 682.571665] env[68233]: _type = "HttpNfcLease" [ 682.571665] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 682.591787] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781984, 'name': PowerOffVM_Task, 'duration_secs': 0.30402} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.592064] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 682.597291] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 682.597564] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d3d2f57-a486-4b06-8d85-ce1bebc1cf46 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.615950] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 682.615950] env[68233]: value = "task-2781987" [ 682.615950] env[68233]: _type = "Task" [ 682.615950] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.624010] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781987, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.690208] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.690208] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.690484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 682.690719] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 682.690928] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 682.693143] env[68233]: INFO nova.compute.manager [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Terminating instance [ 682.705977] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8e50cc7-e01e-4b94-8c40-5d9d92cc786a req-9495c434-b2eb-490f-8e77-9328191a3d5e service nova] Releasing lock "refresh_cache-88d67405-b8c6-484a-b178-68a8babb3708" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.761236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.011977] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.012268] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 683.012498] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.017558] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.030863} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.017734] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 683.017967] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Moving file from [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2/da133fda-e1e2-42a1-a7e0-b8b1426a8490 to [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490. {{(pid=68233) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 683.018307] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-e551fb23-ba4d-4b37-a432-bd74a4e49ba9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.026678] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 683.026678] env[68233]: value = "task-2781988" [ 683.026678] env[68233]: _type = "Task" [ 683.026678] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.036147] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781988, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.068841] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 683.072230] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 683.072230] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528de215-ce78-c324-d310-d5da9e4f37df" [ 683.072230] env[68233]: _type = "HttpNfcLease" [ 683.072230] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 683.072497] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 683.072497] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528de215-ce78-c324-d310-d5da9e4f37df" [ 683.072497] env[68233]: _type = "HttpNfcLease" [ 683.072497] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 683.073316] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2a7393-df67-456a-9423-24d201169764 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.081202] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 683.081387] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 683.156127] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781987, 'name': ReconfigVM_Task, 'duration_secs': 0.461328} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.156427] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 683.156595] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 683.156839] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a06bf13-1e52-4322-bc82-3c94742d028c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.166308] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 683.166308] env[68233]: value = "task-2781989" [ 683.166308] env[68233]: _type = "Task" [ 683.166308] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.173650] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0fa8880-6599-4f3e-93af-ebf9c525b52b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.178865] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781989, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.183367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270b4b1c-80c7-4f54-af39-483ccbef049f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.220065] env[68233]: DEBUG nova.compute.manager [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 683.220332] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 683.224364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9053dc-50f8-4239-9c0a-c9f7ecd1992e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.227408] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42269f26-9036-4a86-abd0-d533792f4f02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.238114] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38d187e-148c-4aeb-a2fa-af5ac078703c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.241964] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 683.242194] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ac3d676-c9bb-43f0-9a6d-e501c77a393d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.253735] env[68233]: DEBUG nova.compute.provider_tree [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.256563] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 683.256563] env[68233]: value = "task-2781990" [ 683.256563] env[68233]: _type = "Task" [ 683.256563] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.265281] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781990, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.281537] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a42e4bbe-088a-4900-a942-e4195eae02a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.304352] env[68233]: DEBUG nova.network.neutron [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Updating instance_info_cache with network_info: [{"id": "489605bb-d528-4c3f-a258-3e2ff5d49913", "address": "fa:16:3e:66:65:d1", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap489605bb-d5", "ovs_interfaceid": "489605bb-d528-4c3f-a258-3e2ff5d49913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.345079] env[68233]: DEBUG nova.compute.manager [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Received event network-vif-plugged-489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 683.345316] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Acquiring lock "636b6b36-3ab5-4851-a232-d27b54895595-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.345647] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Lock "636b6b36-3ab5-4851-a232-d27b54895595-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 683.345759] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Lock "636b6b36-3ab5-4851-a232-d27b54895595-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 683.346049] env[68233]: DEBUG nova.compute.manager 
[req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] No waiting events found dispatching network-vif-plugged-489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 683.346049] env[68233]: WARNING nova.compute.manager [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Received unexpected event network-vif-plugged-489605bb-d528-4c3f-a258-3e2ff5d49913 for instance with vm_state building and task_state spawning. [ 683.346188] env[68233]: DEBUG nova.compute.manager [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Received event network-changed-489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 683.346433] env[68233]: DEBUG nova.compute.manager [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Refreshing instance network info cache due to event network-changed-489605bb-d528-4c3f-a258-3e2ff5d49913. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 683.346536] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Acquiring lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.538024] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781988, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.040896} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.538564] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] File moved {{(pid=68233) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 683.538564] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Cleaning up location [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 683.538720] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleting the datastore file [datastore1] vmware_temp/7159e4e7-03d7-4630-a4a1-9d777242a4c2 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.539339] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b42c95f-5796-4243-94d7-9a3bc88d72c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.546043] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 683.546043] env[68233]: value = "task-2781991" [ 683.546043] env[68233]: _type = "Task" [ 683.546043] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.555057] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781991, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.677219] env[68233]: DEBUG oslo_vmware.api [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2781989, 'name': PowerOnVM_Task, 'duration_secs': 0.397986} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.678847] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 683.678847] env[68233]: DEBUG nova.compute.manager [None req-1cdbe206-8ace-4bfb-8ea1-18823992548b tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 683.678847] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe164720-1aaa-4d5f-ae3d-9719f27a8eb6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.774510] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781990, 'name': PowerOffVM_Task, 'duration_secs': 0.276349} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.775014] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 683.775235] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 683.775601] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b676418-eb7b-4cdd-881b-794e01f3bcd9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.781264] env[68233]: ERROR nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [req-53f33cde-ab36-44fe-9eb7-18920564b624] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-53f33cde-ab36-44fe-9eb7-18920564b624"}]} [ 683.798659] env[68233]: DEBUG nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 683.811411] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Releasing lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.811746] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Instance network_info: |[{"id": "489605bb-d528-4c3f-a258-3e2ff5d49913", "address": "fa:16:3e:66:65:d1", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap489605bb-d5", "ovs_interfaceid": "489605bb-d528-4c3f-a258-3e2ff5d49913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.812068] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Acquired lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.812390] env[68233]: DEBUG nova.network.neutron [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Refreshing network info cache for port 489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.813630] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:66:65:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '489605bb-d528-4c3f-a258-3e2ff5d49913', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.822489] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Creating folder: Project (548b84c71d3d4c2e9f13912fb8ac0e90). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.824147] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d3693ef-ef35-4c09-b614-3e813502898f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.826516] env[68233]: DEBUG nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 683.826969] env[68233]: DEBUG nova.compute.provider_tree [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.839336] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Created folder: Project (548b84c71d3d4c2e9f13912fb8ac0e90) in parent group-v559223. [ 683.841033] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Creating folder: Instances. Parent ref: group-v559312. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.841033] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfe11612-a6f8-4042-9baf-683552c6278f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.843709] env[68233]: DEBUG nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 683.849561] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 683.849800] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 683.850022] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleting the datastore file [datastore2] a340c66c-74eb-43e5-8e72-54d9c8b07a26 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 683.850770] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eddb935-5583-4e2e-860f-540c7a8a0101 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.856343] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Created folder: Instances in parent group-v559312. [ 683.856568] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.857837] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.858249] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for the task: (returnval){ [ 683.858249] env[68233]: value = "task-2781995" [ 683.858249] env[68233]: _type = "Task" [ 683.858249] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.858497] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b9f8c73-758d-4d10-8c54-a508174f385d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.875762] env[68233]: DEBUG nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 683.889942] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781995, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.891488] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.891488] env[68233]: value = "task-2781996" [ 683.891488] env[68233]: _type = "Task" [ 683.891488] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.899386] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781996, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.059549] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781991, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034695} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.059953] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.060765] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2868cb59-23f3-4bf1-81c7-cb4a81755f9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.069828] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 684.069828] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c2ad0-1e26-656a-9217-c54d8f98e66b" [ 684.069828] env[68233]: _type = "Task" [ 684.069828] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.079018] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c2ad0-1e26-656a-9217-c54d8f98e66b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.388553] env[68233]: DEBUG oslo_vmware.api [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Task: {'id': task-2781995, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.213666} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.390880] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 684.391122] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 684.391309] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 684.391524] env[68233]: INFO nova.compute.manager [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Took 1.17 seconds to destroy the instance on the hypervisor. [ 684.391732] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 684.392618] env[68233]: DEBUG nova.compute.manager [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 684.392734] env[68233]: DEBUG nova.network.neutron [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 684.406224] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2781996, 'name': CreateVM_Task, 'duration_secs': 0.364733} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.406224] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 684.406224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.406224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.406224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 684.406443] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19240816-6d84-4e43-916e-e26f84fd4835 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.412386] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 684.412386] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52703fd6-5361-0293-d687-dee721942644" [ 684.412386] env[68233]: _type = "Task" [ 684.412386] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.421080] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52703fd6-5361-0293-d687-dee721942644, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.438144] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16722b1-1e74-47c7-b5a4-92a3863be5ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.446770] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7091cb-7960-4084-b111-4c55d21af064 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.484195] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b62981-4728-46e9-aa9a-c493a3866a07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.493596] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029227e6-7fc1-4640-ade4-81078c233ad9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.514790] env[68233]: DEBUG nova.compute.provider_tree [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.581242] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c2ad0-1e26-656a-9217-c54d8f98e66b, 'name': SearchDatastore_Task, 'duration_secs': 0.010808} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.581520] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.581766] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 0f7d80d2-5c34-42f7-a14a-97f9625675a8/0f7d80d2-5c34-42f7-a14a-97f9625675a8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 684.582070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.582334] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.582611] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a27403eb-517c-43c8-a3e5-d46130c8755b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.585285] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b10d8d6e-a973-4dac-81fd-325f8587e774 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.592587] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 684.592587] env[68233]: value = "task-2781997" [ 684.592587] env[68233]: _type = "Task" [ 684.592587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.597433] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.597645] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.598752] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28118993-e406-4e0a-8322-43dd5b152e3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.605025] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781997, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.611271] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 684.611271] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2d227-1c56-ba5e-5210-cb66a8efbf3a" [ 684.611271] env[68233]: _type = "Task" [ 684.611271] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.620091] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2d227-1c56-ba5e-5210-cb66a8efbf3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.725496] env[68233]: DEBUG nova.network.neutron [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Updated VIF entry in instance network info cache for port 489605bb-d528-4c3f-a258-3e2ff5d49913. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.726304] env[68233]: DEBUG nova.network.neutron [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Updating instance_info_cache with network_info: [{"id": "489605bb-d528-4c3f-a258-3e2ff5d49913", "address": "fa:16:3e:66:65:d1", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.124", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap489605bb-d5", "ovs_interfaceid": "489605bb-d528-4c3f-a258-3e2ff5d49913", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.924268] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52703fd6-5361-0293-d687-dee721942644, 'name': SearchDatastore_Task, 'duration_secs': 0.02189} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.924268] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.924268] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.924268] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.061367] env[68233]: DEBUG nova.scheduler.client.report [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 685.061661] env[68233]: DEBUG nova.compute.provider_tree [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 55 to 56 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 685.061850] env[68233]: DEBUG nova.compute.provider_tree [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.102842] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] 
Task: {'id': task-2781997, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.121794] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2d227-1c56-ba5e-5210-cb66a8efbf3a, 'name': SearchDatastore_Task, 'duration_secs': 0.010157} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.122655] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9efbcdca-4fcb-4a6c-810d-e3856f127d37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.128898] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 685.128898] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5296c13e-9c45-06f3-3b77-ab39f2eeb939" [ 685.128898] env[68233]: _type = "Task" [ 685.128898] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.138198] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5296c13e-9c45-06f3-3b77-ab39f2eeb939, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.229376] env[68233]: DEBUG oslo_concurrency.lockutils [req-78018996-ec2a-4acb-9abb-46593f34acb3 req-9f003a8e-4a41-4888-a173-91de5aa920dd service nova] Releasing lock "refresh_cache-636b6b36-3ab5-4851-a232-d27b54895595" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.309104] env[68233]: DEBUG nova.network.neutron [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.472856] env[68233]: DEBUG nova.compute.manager [req-099cfcf4-ed4e-4a94-8b5b-e65c2a8d321f req-a4b4bc7e-4b5e-401b-94b7-885925d3b519 service nova] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Received event network-vif-deleted-f1ca2437-5a7d-4e37-9f83-6cbec685618e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 685.567972] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.928s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.568511] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.572348] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.793s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.572506] env[68233]: DEBUG nova.objects.instance [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lazy-loading 'resources' on Instance uuid d1577f70-4fb6-4b0b-9d41-8d245c26c90c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 685.604043] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781997, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613337} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.604427] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 0f7d80d2-5c34-42f7-a14a-97f9625675a8/0f7d80d2-5c34-42f7-a14a-97f9625675a8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 685.604677] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 685.605015] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d69c189f-e9eb-41ac-aa3e-3fc429981272 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.613204] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 685.613204] env[68233]: value = "task-2781998" [ 685.613204] env[68233]: _type = "Task" [ 685.613204] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.623103] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781998, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.640122] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5296c13e-9c45-06f3-3b77-ab39f2eeb939, 'name': SearchDatastore_Task, 'duration_secs': 0.057823} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.640380] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.640660] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 88d67405-b8c6-484a-b178-68a8babb3708/88d67405-b8c6-484a-b178-68a8babb3708.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.640977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.641263] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.641410] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fcc07f10-cc5e-42d5-91e3-e0c99000b7f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.643588] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dab0bad-fcaf-474a-b7ca-4a9f909b973d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.650807] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 685.650807] env[68233]: value = "task-2781999" [ 685.650807] env[68233]: _type = "Task" [ 685.650807] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.655497] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.655743] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 685.657484] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43ef72ee-5aad-4592-bdf1-1d51559a89ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.663761] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2781999, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.667202] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 685.667202] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d2a0a-6281-4da6-c475-48a88b412fb2" [ 685.667202] env[68233]: _type = "Task" [ 685.667202] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.676179] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d2a0a-6281-4da6-c475-48a88b412fb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.813043] env[68233]: INFO nova.compute.manager [-] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Took 1.42 seconds to deallocate network for instance. [ 686.076652] env[68233]: DEBUG nova.compute.utils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 686.084045] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 686.084283] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.129017] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2781998, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069033} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.129313] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.130290] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c397f4-ddcc-4ce0-9e92-784816f15a5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.159560] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 0f7d80d2-5c34-42f7-a14a-97f9625675a8/0f7d80d2-5c34-42f7-a14a-97f9625675a8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.162748] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bccfb6b-454b-436c-8b86-fffb124a53b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.179055] env[68233]: DEBUG nova.policy [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ae65c06e82e4d0da88754c35e8079b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '526492ca2e3440dbbe9e7027588f5a0c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.194878] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d2a0a-6281-4da6-c475-48a88b412fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.009483} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.198127] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 686.198127] env[68233]: value = "task-2782000" [ 686.198127] env[68233]: _type = "Task" [ 686.198127] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.198409] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2781999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491247} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.201132] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be42203c-1b56-4d25-addf-50be4b0451e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.203500] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 88d67405-b8c6-484a-b178-68a8babb3708/88d67405-b8c6-484a-b178-68a8babb3708.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.203737] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.209028] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73bf85ed-34a8-4bd4-b1c9-f770960e79ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.211959] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 686.211959] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5258072f-253c-eb13-8389-6c8977eee557" [ 686.211959] env[68233]: _type = "Task" [ 686.211959] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.215968] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782000, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.220443] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 686.220443] env[68233]: value = "task-2782001" [ 686.220443] env[68233]: _type = "Task" [ 686.220443] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.232533] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5258072f-253c-eb13-8389-6c8977eee557, 'name': SearchDatastore_Task, 'duration_secs': 0.012763} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.233228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.233505] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 636b6b36-3ab5-4851-a232-d27b54895595/636b6b36-3ab5-4851-a232-d27b54895595.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 686.233771] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d110250e-aa75-4307-9385-3c28c7f32d8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.240107] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.246893] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 686.246893] env[68233]: value = "task-2782002" [ 686.246893] env[68233]: _type = "Task" [ 686.246893] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.256401] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782002, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.319649] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 686.518933] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Successfully created port: 1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.587772] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.721749] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782000, 'name': ReconfigVM_Task, 'duration_secs': 0.444022} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.721749] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 0f7d80d2-5c34-42f7-a14a-97f9625675a8/0f7d80d2-5c34-42f7-a14a-97f9625675a8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 686.721749] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7493c0fe-5318-4be2-acb6-ea1b4f6e1d9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.733819] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 686.733819] env[68233]: value = "task-2782003" [ 686.733819] env[68233]: _type = "Task" [ 686.733819] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.737724] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067543} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.741895] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.742688] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b2cb04-1112-4bd8-862a-2e843e429ac0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.747367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99468d7-15af-4806-85fb-31c5ff919ee1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.757899] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782003, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.769193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77fbedd-9b7f-4869-b2cd-8d7e11a9f073 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.780692] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 88d67405-b8c6-484a-b178-68a8babb3708/88d67405-b8c6-484a-b178-68a8babb3708.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.787178] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52d9c688-2ed1-45e1-a5b8-76ec5b372a7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.801811] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519852} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.802536] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 636b6b36-3ab5-4851-a232-d27b54895595/636b6b36-3ab5-4851-a232-d27b54895595.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.802754] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.803322] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c88268b0-5354-4be5-8bab-f19eab6c845f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.836842] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4e4f73a-b83d-4a20-8062-3ac54d2a55ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.839619] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 686.839619] env[68233]: value = "task-2782004" [ 686.839619] env[68233]: _type = "Task" [ 686.839619] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.841263] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 686.841263] env[68233]: value = "task-2782005" [ 686.841263] env[68233]: _type = "Task" [ 686.841263] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.851924] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea2dcd3-9873-4431-8f21-c091f2d11b11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.860559] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.864305] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782005, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.872730] env[68233]: DEBUG nova.compute.provider_tree [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.104561] env[68233]: INFO nova.virt.block_device [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Booting with volume 9bb63a6b-3e52-4693-a250-876762d38f26 at /dev/sda [ 687.157948] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27f05955-8b44-4fae-b83b-6949d81e434c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.170878] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81b22c3-73f5-4417-8baa-3c4dedae6a59 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.204189] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6116ee91-19b7-4752-bb89-250587eed8c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.212993] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9835d209-821e-45ae-b9e9-b6b55e5b88af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.250201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc04da3c-2e70-4247-9e69-e852b071552d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.261623] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782003, 'name': Rename_Task, 'duration_secs': 0.15118} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.262582] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace076dd-b7c6-4607-87e0-aa9ed5884b79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.265176] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 687.265435] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4579a3fa-b2b6-4b8c-866a-65aebd41ef44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.272325] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 687.272325] env[68233]: value = "task-2782006" [ 687.272325] env[68233]: _type = "Task" [ 687.272325] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.281288] env[68233]: DEBUG nova.virt.block_device [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updating existing volume attachment record: e5358588-4354-472c-852f-13048dd54e20 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 687.289060] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782006, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.354120] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113981} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.357066] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 687.359073] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782004, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.359073] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2838e4d-ae31-4703-9108-de545a78d000 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.381870] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 636b6b36-3ab5-4851-a232-d27b54895595/636b6b36-3ab5-4851-a232-d27b54895595.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 687.382886] env[68233]: DEBUG nova.scheduler.client.report [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 687.386104] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e9ca2e1-be64-423a-bc1f-124bbc30a59a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.408486] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 687.408486] env[68233]: value = "task-2782007" [ 687.408486] env[68233]: _type = "Task" [ 687.408486] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.417470] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782007, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.784340] env[68233]: DEBUG oslo_vmware.api [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782006, 'name': PowerOnVM_Task, 'duration_secs': 0.487746} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.784340] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.784340] env[68233]: INFO nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Took 11.90 seconds to spawn the instance on the hypervisor. [ 687.784340] env[68233]: DEBUG nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.784753] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-536bf041-74ec-44af-9bbd-e2ef220b6da2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.852615] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782004, 'name': ReconfigVM_Task, 'duration_secs': 0.565647} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.852888] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 88d67405-b8c6-484a-b178-68a8babb3708/88d67405-b8c6-484a-b178-68a8babb3708.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.853620] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d6d8ff1-0787-4804-9ab3-1b76d552f195 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.860454] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 687.860454] env[68233]: value = "task-2782008" [ 687.860454] env[68233]: _type = "Task" [ 687.860454] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.871042] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782008, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.902988] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.331s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.905856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 32.930s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.919976] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782007, 'name': ReconfigVM_Task, 'duration_secs': 0.35298} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.919976] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 636b6b36-3ab5-4851-a232-d27b54895595/636b6b36-3ab5-4851-a232-d27b54895595.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.922308] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a21d7da-a96c-4bc5-b42f-363f0cb978bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.925849] env[68233]: INFO nova.scheduler.client.report [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Deleted allocations for instance d1577f70-4fb6-4b0b-9d41-8d245c26c90c [ 687.933774] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 687.933774] env[68233]: value = "task-2782009" [ 687.933774] env[68233]: _type = "Task" [ 687.933774] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.942061] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782009, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.012648] env[68233]: DEBUG nova.compute.manager [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Received event network-vif-plugged-1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 688.012788] env[68233]: DEBUG oslo_concurrency.lockutils [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] Acquiring lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.013045] env[68233]: DEBUG oslo_concurrency.lockutils [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.013229] env[68233]: DEBUG oslo_concurrency.lockutils [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.013446] env[68233]: DEBUG nova.compute.manager [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] No waiting events found dispatching network-vif-plugged-1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 688.013627] env[68233]: WARNING nova.compute.manager [req-d96aa70d-5467-4e08-95fe-091e845cb103 req-66d1f7ff-0752-414e-a272-0742b9a4e36b service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Received unexpected event network-vif-plugged-1ac399a3-6f36-48cc-8104-c828a414b1b0 for instance with vm_state building and task_state block_device_mapping. [ 688.106693] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Successfully updated port: 1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 688.305235] env[68233]: INFO nova.compute.manager [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Took 47.84 seconds to build instance. [ 688.376446] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782008, 'name': Rename_Task, 'duration_secs': 0.207281} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.377016] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.377289] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e83bbcc-f000-4de9-8db1-3d9b0fc3187c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.384339] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 688.384339] env[68233]: value = "task-2782010" [ 688.384339] env[68233]: _type = "Task" [ 688.384339] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.393547] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782010, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.417546] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.417863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.433370] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f47281dd-641e-4db1-9056-3d1ad2070002 tempest-ServerExternalEventsTest-1611400731 tempest-ServerExternalEventsTest-1611400731-project-member] Lock "d1577f70-4fb6-4b0b-9d41-8d245c26c90c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.212s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.448205] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782009, 'name': Rename_Task, 'duration_secs': 0.223271} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.449109] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.449109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2aa11d13-40c2-44b6-a02a-e13a9d579d13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.457643] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 688.457643] env[68233]: value = "task-2782011" [ 688.457643] env[68233]: _type = "Task" [ 688.457643] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.469548] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.610643] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.610808] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquired lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.610983] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.810690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-692ea0e5-1300-4156-9329-2fa53bf7e8c5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.116s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.897739] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782010, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.903585] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838f42ec-fb6e-4851-a15c-8ba7c8e71aae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.911149] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f387ea14-bef5-44b6-9252-8150edaaf753 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.948055] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035d0aae-6cff-42b9-82e3-d2f4281e8b3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.955989] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62984df-5185-4fde-ba7a-2e3c51b2a2f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.977797] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782011, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.978449] env[68233]: DEBUG nova.compute.provider_tree [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 689.151724] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 689.240345] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.240462] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.240702] env[68233]: INFO nova.compute.manager [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Rebooting instance [ 689.313371] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 689.317482] env[68233]: DEBUG nova.network.neutron [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updating instance_info_cache with network_info: [{"id": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "address": "fa:16:3e:ee:89:df", "network": {"id": "23b82ec7-3a62-44d1-9584-751137707d0d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1001397050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "526492ca2e3440dbbe9e7027588f5a0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac399a3-6f", "ovs_interfaceid": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.395289] env[68233]: DEBUG oslo_vmware.api [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782010, 'name': PowerOnVM_Task, 'duration_secs': 0.768042} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.395566] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.395774] env[68233]: INFO nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Took 10.74 seconds to spawn the instance on the hypervisor. [ 689.396117] env[68233]: DEBUG nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.396787] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1923d244-d901-45a1-9d50-453199bfc533 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.451250] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 689.451782] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 689.452007] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 689.452270] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 689.452459] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Flavor pref 0:0:0 
{{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 689.452611] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 689.452761] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 689.452972] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 689.454076] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 689.454676] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 689.454769] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 689.454971] env[68233]: DEBUG nova.virt.hardware [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 689.457151] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9604654f-d95b-4978-b6c9-a356bb6b9f34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.468533] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d12e147-ecba-48d6-b823-4d9512728d4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.476766] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782011, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.490281] env[68233]: DEBUG nova.scheduler.client.report [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 689.765959] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.766183] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquired lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.766362] env[68233]: DEBUG nova.network.neutron [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.823529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Releasing lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.824605] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Instance network_info: |[{"id": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "address": "fa:16:3e:ee:89:df", "network": {"id": "23b82ec7-3a62-44d1-9584-751137707d0d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1001397050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "526492ca2e3440dbbe9e7027588f5a0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac399a3-6f", "ovs_interfaceid": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.827069] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:89:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50cf0a70-948d-4611-af05-94c1483064ed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ac399a3-6f36-48cc-8104-c828a414b1b0', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.836452] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Creating folder: Project (526492ca2e3440dbbe9e7027588f5a0c). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.837312] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-596f4650-e355-4c0d-87ee-3acfd7406602 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.852158] env[68233]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 689.852366] env[68233]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68233) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 689.852807] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Folder already exists: Project (526492ca2e3440dbbe9e7027588f5a0c). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 689.853089] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Creating folder: Instances. Parent ref: group-v559256. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.854229] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.854469] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-013b5624-1dbc-4ff3-83a2-9fb46b117458 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.864139] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Created folder: Instances in parent group-v559256. [ 689.864382] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.864582] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.864788] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8a7a6fd-3ae8-4aa3-8ba3-8945bb3d85f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.885323] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.885323] env[68233]: value = "task-2782014" [ 689.885323] env[68233]: _type = "Task" [ 689.885323] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.893813] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782014, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.917013] env[68233]: INFO nova.compute.manager [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Took 45.20 seconds to build instance. [ 689.973173] env[68233]: DEBUG oslo_vmware.api [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782011, 'name': PowerOnVM_Task, 'duration_secs': 1.044561} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.973694] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.975314] env[68233]: INFO nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Took 8.54 seconds to spawn the instance on the hypervisor. [ 689.975314] env[68233]: DEBUG nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.975434] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607fd0b7-d00e-4c4c-b481-924fdcb9c0e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.334444] env[68233]: DEBUG nova.compute.manager [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Received event network-changed-1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 690.334706] env[68233]: DEBUG nova.compute.manager [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Refreshing instance network info cache due to event network-changed-1ac399a3-6f36-48cc-8104-c828a414b1b0. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 690.336024] env[68233]: DEBUG oslo_concurrency.lockutils [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] Acquiring lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.336024] env[68233]: DEBUG oslo_concurrency.lockutils [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] Acquired lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.336024] env[68233]: DEBUG nova.network.neutron [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Refreshing network info cache for port 1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 690.396651] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782014, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.419736] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae5c40b3-35b5-4fb6-a1d8-5a3919c8678f tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.961s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.453513] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 690.454981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2221e274-fb92-44f4-8fd3-8ef10bdc09d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.461792] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 690.462699] env[68233]: ERROR oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk due to incomplete transfer. [ 690.463086] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cb8ca1f7-3875-43ef-8720-55072e2fb1c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.472780] env[68233]: DEBUG oslo_vmware.rw_handles [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52700ac9-3da1-ed2c-4026-db48bbeb5478/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 690.473023] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Uploaded image ad54b4da-5373-4eb1-bcc7-b5685fe04812 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 690.475176] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 690.475460] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0b7d8b2c-2e8b-4115-9a98-d7d9ec057c06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.487135] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 690.487135] env[68233]: value = "task-2782015" [ 690.487135] env[68233]: _type = "Task" [ 690.487135] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.499997] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782015, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.499997] env[68233]: INFO nova.compute.manager [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Took 41.50 seconds to build instance. 
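
The records above and below all follow the same task-polling protocol from oslo_vmware.api: a *_Task method is invoked (request_handler at service.py:371), wait_for_task logs the returned Task moref (api.py:397), _poll_task reports progress (api.py:434), and completion is logged with the measured duration_secs (api.py:444). The sketch below is a minimal illustration of that loop, not the oslo.vmware implementation; get_task_info() is a hypothetical stand-in for the PropertyCollector (RetrievePropertiesEx) reads of the task's info property that recur throughout this log.

```python
# Minimal sketch of the "Waiting for the task ... to complete" /
# "progress is N%" / "completed successfully" pattern seen above.
# Hypothetical: get_task_info(task_ref) stands in for a PropertyCollector
# read of the Task object's "info" property; it is not an oslo.vmware API.
import time


def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
    """Poll a vSphere task moref until it reaches a terminal state."""
    while True:
        info = get_task_info(task_ref)  # exposes .state, .progress, .result, .error
        if info.state == 'success':
            print(f"Task {task_ref} completed successfully")
            return info.result
        if info.state == 'error':
            # oslo.vmware raises a translated fault exception here; kept generic.
            raise RuntimeError(f"Task {task_ref} failed: {info.error}")
        print(f"Task {task_ref} progress is {info.progress or 0}%")
        time.sleep(poll_interval)
```
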
[ 690.502779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.597s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.506670] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.311s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.509779] env[68233]: INFO nova.compute.claims [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.521986] env[68233]: DEBUG nova.network.neutron [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Updating instance_info_cache with network_info: [{"id": "436ed418-46de-465b-920d-6fddf7ec041d", "address": "fa:16:3e:b3:33:82", "network": {"id": "7cfaf07f-a0d1-4eb5-9524-24a3604eeaa7", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-614869909-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c98b110be96f495ab5ef126a45b8328e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d062877-0b23-4965-908b-f585f25f3bf1", "external-id": "nsx-vlan-transportzone-523", "segmentation_id": 523, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap436ed418-46", "ovs_interfaceid": "436ed418-46de-465b-920d-6fddf7ec041d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.895286] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782014, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.924031] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.998838] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782015, 'name': Destroy_Task, 'duration_secs': 0.362704} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.999158] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Destroyed the VM [ 690.999428] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 691.000182] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5d9bf389-ab6d-4648-9764-a710474d5bc6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.001853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3642a679-7865-4d24-ae13-4ee4083707ad tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "636b6b36-3ab5-4851-a232-d27b54895595" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.167s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.008635] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 691.008635] env[68233]: value = "task-2782016" [ 691.008635] env[68233]: _type = "Task" [ 691.008635] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.022124] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782016, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.025760] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Releasing lock "refresh_cache-0f7d80d2-5c34-42f7-a14a-97f9625675a8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.097025] env[68233]: INFO nova.scheduler.client.report [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleted allocation for migration f9c4180d-3134-46ba-8082-85301d976f9c [ 691.395373] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782014, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.405236] env[68233]: DEBUG nova.network.neutron [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updated VIF entry in instance network info cache for port 1ac399a3-6f36-48cc-8104-c828a414b1b0. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 691.405720] env[68233]: DEBUG nova.network.neutron [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updating instance_info_cache with network_info: [{"id": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "address": "fa:16:3e:ee:89:df", "network": {"id": "23b82ec7-3a62-44d1-9584-751137707d0d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1001397050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "526492ca2e3440dbbe9e7027588f5a0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac399a3-6f", "ovs_interfaceid": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.444581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.508216] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.535572] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782016, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.536034] env[68233]: DEBUG nova.compute.manager [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.537475] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60020958-750b-45eb-9175-df9aa93cc1d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.585672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "88d67405-b8c6-484a-b178-68a8babb3708" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.585672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.585672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "88d67405-b8c6-484a-b178-68a8babb3708-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.585672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.586857] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.587993] env[68233]: INFO nova.compute.manager [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Terminating instance [ 691.605543] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69d505c-fbb1-48bb-9dc9-ebf47ceb7d43 tempest-MigrationsAdminTest-1088288043 
tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 40.183s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.900091] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782014, 'name': CreateVM_Task, 'duration_secs': 1.532318} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.900188] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.900894] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559266', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'name': 'volume-9bb63a6b-3e52-4693-a250-876762d38f26', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c9b701e-6461-45e3-8654-3291c5a487b9', 'attached_at': '', 'detached_at': '', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'serial': '9bb63a6b-3e52-4693-a250-876762d38f26'}, 'boot_index': 0, 'attachment_id': 'e5358588-4354-472c-852f-13048dd54e20', 'mount_device': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68233) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 691.901213] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Root volume attach. 
Driver type: vmdk {{(pid=68233) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 691.901921] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86bfcb09-65bd-4235-a102-b8aa3a75027a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.910196] env[68233]: DEBUG oslo_concurrency.lockutils [req-c0d76b06-5d78-44ec-ab7a-804c77ecb64e req-7f8fad28-13a4-4fe9-8f23-61696e501563 service nova] Releasing lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.913714] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c93d134-8f35-468c-984c-2a78b138f08f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.920191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cedce7-ad20-4b58-b2d7-62633f378556 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.930486] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b6316aa0-f338-4ebb-8049-7f0542a63073 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.937293] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 691.937293] env[68233]: value = "task-2782017" [ 691.937293] env[68233]: _type = "Task" [ 691.937293] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.946635] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782017, 'name': RelocateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.035507] env[68233]: DEBUG oslo_vmware.api [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782016, 'name': RemoveSnapshot_Task, 'duration_secs': 0.645613} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.035942] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 692.036312] env[68233]: INFO nova.compute.manager [None req-43024914-c00f-4d6e-b9e9-e8836a330c59 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 14.13 seconds to snapshot the instance on the hypervisor. 
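
The many "Acquiring lock ... by ...", "acquired by ... :: waited Ns" and "released by ... :: held Ns" records in this section (for "compute_resources", the per-instance UUID locks and the "refresh_cache-..." locks) are emitted by the inner() wrapper of oslo_concurrency.lockutils at lockutils.py:405/410/424. Below is a minimal sketch of the calling pattern that produces them, assuming only oslo.concurrency is installed; the decorated function is a hypothetical example, not Nova's ResourceTracker code.

```python
# Hypothetical example of the lockutils usage behind the
# "acquired by ... waited" / "released ... held" DEBUG records above.
# The lock-name and timing logging happens inside the synchronized wrapper,
# so callers only name the lock they want to serialize on.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # Runs under the in-process "compute_resources" semaphore, so concurrent
    # claims and usage updates are serialized, which is what the waited/held
    # durations reported for the ResourceTracker calls in the log measure.
    print(f"updating usage for {instance_uuid}")


if __name__ == '__main__':
    update_usage('0f7d80d2-5c34-42f7-a14a-97f9625675a8')
```
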
[ 692.057600] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.096197] env[68233]: DEBUG nova.compute.manager [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 692.096380] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 692.097666] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9008df2c-682a-4ed6-b524-cef0c6faffb9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.121418] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.121826] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1fd6fa5-67b5-4507-ac23-c653d2e2c8bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.129486] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 692.129486] env[68233]: value = "task-2782018" [ 692.129486] env[68233]: _type = "Task" [ 692.129486] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.139012] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.343652] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a54ee67-85fe-4536-96b3-0a4c586a41b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.352494] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71aa16c5-7e06-45bb-9a41-bba582b0c5eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.389720] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c0f676-973d-4a5f-b536-c07862e7bc13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.398788] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b6788a-ab9a-4bd2-a7d9-53333d1ff0e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.415706] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 692.447513] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782017, 'name': RelocateVM_Task} progress is 20%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.562973] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8180445f-0df6-450b-b658-fc9520e1d519 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.573983] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Doing hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 692.574270] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-35670084-16f6-43ba-a569-85c25bc03317 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.583045] env[68233]: DEBUG oslo_vmware.api [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 692.583045] env[68233]: value = "task-2782019" [ 692.583045] env[68233]: _type = "Task" [ 692.583045] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.590682] env[68233]: DEBUG oslo_vmware.api [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782019, 'name': ResetVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.639490] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782018, 'name': PowerOffVM_Task, 'duration_secs': 0.233866} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.639756] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.639960] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 692.640231] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21666d13-3e90-4c91-bbd7-ac930045265b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.694178] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "636b6b36-3ab5-4851-a232-d27b54895595" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.694461] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "636b6b36-3ab5-4851-a232-d27b54895595" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.694665] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "636b6b36-3ab5-4851-a232-d27b54895595-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.694847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock 
"636b6b36-3ab5-4851-a232-d27b54895595-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.695028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "636b6b36-3ab5-4851-a232-d27b54895595-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.697635] env[68233]: INFO nova.compute.manager [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Terminating instance [ 692.707150] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 692.707391] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 692.707576] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Deleting the datastore file [datastore1] 88d67405-b8c6-484a-b178-68a8babb3708 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 692.708805] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fb76bdb0-a91d-4617-be97-ee9cfc0317d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.717101] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for the task: (returnval){ [ 692.717101] env[68233]: value = "task-2782021" [ 692.717101] env[68233]: _type = "Task" [ 692.717101] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.726496] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.943927] env[68233]: ERROR nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [req-33e324e6-bff9-4747-9553-82715402f03c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-33e324e6-bff9-4747-9553-82715402f03c"}]} [ 692.950647] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782017, 'name': RelocateVM_Task} progress is 20%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.969534] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 692.983902] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 692.984196] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 692.996254] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing aggregate associations 
for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 693.026219] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 693.094366] env[68233]: DEBUG oslo_vmware.api [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782019, 'name': ResetVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.207287] env[68233]: DEBUG nova.compute.manager [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 693.207613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 693.209432] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7daadf09-aef3-45d3-a74d-952a3ec9578c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.217368] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 693.217692] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98ea3056-a50c-4390-98a8-e23fa2ce8c28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.237131] env[68233]: DEBUG oslo_vmware.api [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Task: {'id': task-2782021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395671} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.238482] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 693.238482] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 693.239300] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 693.239300] env[68233]: INFO nova.compute.manager [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Took 1.14 seconds to destroy the instance on the hypervisor. [ 693.239300] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.239592] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 693.239592] env[68233]: value = "task-2782022" [ 693.239592] env[68233]: _type = "Task" [ 693.239592] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.239707] env[68233]: DEBUG nova.compute.manager [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 693.239707] env[68233]: DEBUG nova.network.neutron [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 693.252390] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782022, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.448832] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782017, 'name': RelocateVM_Task} progress is 20%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.556182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "72467d49-6fa8-42db-871e-4e50e77eedf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.556182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.594308] env[68233]: DEBUG oslo_vmware.api [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782019, 'name': ResetVM_Task, 'duration_secs': 0.92558} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.595163] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Did hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 693.595163] env[68233]: DEBUG nova.compute.manager [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 693.595916] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df09822-02aa-413e-9472-801339943c41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.600603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c248f303-c07c-46b6-90e7-a511e4a6ec70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.612629] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2410409-c62c-4c11-aa9d-a7db98dc8f12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.647764] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4a5295-f069-4278-8580-582b22e88b99 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.657475] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28651cd5-dcdd-4a2a-86da-5dfb63fe4eb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.674781] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 693.752986] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782022, 'name': PowerOffVM_Task, 'duration_secs': 0.46112} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.753497] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 693.753716] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 693.753972] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fd5117c-32b6-4921-bc25-ed0113655771 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.826191] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 693.826846] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 693.826846] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Deleting the datastore file [datastore1] 
636b6b36-3ab5-4851-a232-d27b54895595 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 693.827015] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eee6725a-b82d-454d-8bb9-4042a5cf786f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.833514] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for the task: (returnval){ [ 693.833514] env[68233]: value = "task-2782024" [ 693.833514] env[68233]: _type = "Task" [ 693.833514] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.842035] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': task-2782024, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.949938] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782017, 'name': RelocateVM_Task, 'duration_secs': 1.526713} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.953629] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 693.953629] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559266', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'name': 'volume-9bb63a6b-3e52-4693-a250-876762d38f26', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c9b701e-6461-45e3-8654-3291c5a487b9', 'attached_at': '', 'detached_at': '', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'serial': '9bb63a6b-3e52-4693-a250-876762d38f26'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 693.954422] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09a319d-9ebb-4ed2-a6cd-f4a21aa1e69d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.961266] env[68233]: DEBUG nova.compute.manager [req-40f81bd1-71be-43f7-a61c-d1c530823095 req-fc89ef17-43d2-4424-a17b-1f08410382da service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Received event network-vif-deleted-786586a9-c15b-4009-9c4b-ce3b65a85ea1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 693.961577] env[68233]: INFO nova.compute.manager [req-40f81bd1-71be-43f7-a61c-d1c530823095 req-fc89ef17-43d2-4424-a17b-1f08410382da service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Neutron deleted interface 786586a9-c15b-4009-9c4b-ce3b65a85ea1; detaching it from the instance and deleting it from the info cache [ 693.961813] env[68233]: DEBUG nova.network.neutron [req-40f81bd1-71be-43f7-a61c-d1c530823095 req-fc89ef17-43d2-4424-a17b-1f08410382da service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.980846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67740e64-edb2-4cd5-b00b-dda3d44c4bff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.013634] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] volume-9bb63a6b-3e52-4693-a250-876762d38f26/volume-9bb63a6b-3e52-4693-a250-876762d38f26.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.014795] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3136e73-da7b-4a1c-83d4-4238f7f70fe5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.043352] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 
tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 694.043352] env[68233]: value = "task-2782025" [ 694.043352] env[68233]: _type = "Task" [ 694.043352] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.054935] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782025, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.118495] env[68233]: DEBUG oslo_concurrency.lockutils [None req-988218df-74bf-4ece-829c-5d61f006475b tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.878s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.202129] env[68233]: ERROR nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [req-f81b32d9-41ab-4c39-8c6f-5099bc347c74] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f81b32d9-41ab-4c39-8c6f-5099bc347c74"}]} [ 694.227404] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 694.245488] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 694.245488] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 694.261764] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 694.294738] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 694.305158] env[68233]: DEBUG nova.network.neutron [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.343735] env[68233]: DEBUG oslo_vmware.api [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Task: {'id': 
task-2782024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285141} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.344037] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 694.344228] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 694.344400] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 694.344563] env[68233]: INFO nova.compute.manager [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Took 1.14 seconds to destroy the instance on the hypervisor. [ 694.344890] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.345249] env[68233]: DEBUG nova.compute.manager [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 694.345375] env[68233]: DEBUG nova.network.neutron [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 694.465582] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8db84b2e-8c8a-4bac-9fa5-4c761fe70065 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.475278] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f01cec3-3506-447b-a866-c4aaa9ca3615 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.513296] env[68233]: DEBUG nova.compute.manager [req-40f81bd1-71be-43f7-a61c-d1c530823095 req-fc89ef17-43d2-4424-a17b-1f08410382da service nova] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Detach interface failed, port_id=786586a9-c15b-4009-9c4b-ce3b65a85ea1, reason: Instance 88d67405-b8c6-484a-b178-68a8babb3708 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 694.517196] env[68233]: DEBUG nova.compute.manager [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.518515] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbea5e5c-3a00-4963-b110-7b7bf007e1de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.553959] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782025, 'name': ReconfigVM_Task, 'duration_secs': 0.327579} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.556828] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Reconfigured VM instance instance-0000001e to attach disk [datastore2] volume-9bb63a6b-3e52-4693-a250-876762d38f26/volume-9bb63a6b-3e52-4693-a250-876762d38f26.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 694.562582] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-073efa9b-164a-43d4-91c0-8280684c3295 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.580132] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 694.580132] env[68233]: value = "task-2782026" [ 694.580132] env[68233]: _type = "Task" [ 694.580132] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.593231] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782026, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.808572] env[68233]: INFO nova.compute.manager [-] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Took 1.57 seconds to deallocate network for instance. 
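The repeated "Failed to update inventory ... Got 409 ... placement.concurrent_update" errors are placement's optimistic concurrency control at work: each inventory PUT carries the resource provider generation, a stale generation is rejected with 409, and the report client re-reads inventories, aggregates and traits before retrying (the "Refreshing ..." lines that follow each conflict). A rough sketch of that retry protocol, using a plain REST client rather than Nova's SchedulerReportClient; PLACEMENT_URL and the token header are hypothetical placeholders.

# Rough sketch of the generation-conflict retry loop implied by the 409
# "placement.concurrent_update" entries above. Not Nova's SchedulerReportClient;
# PLACEMENT_URL and TOKEN are hypothetical placeholders.
import requests

PLACEMENT_URL = 'http://placement.example.test/placement'   # placeholder
HEADERS = {'X-Auth-Token': 'TOKEN',                          # placeholder
           'OpenStack-API-Version': 'placement 1.26'}


def set_inventory(rp_uuid, inventories, max_retries=4):
    url = '%s/resource_providers/%s/inventories' % (PLACEMENT_URL, rp_uuid)
    for _ in range(max_retries):
        # Re-read the current inventory to learn the provider generation.
        current = requests.get(url, headers=HEADERS).json()
        payload = {
            'resource_provider_generation': current['resource_provider_generation'],
            'inventories': inventories,
        }
        resp = requests.put(url, json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation,
        # so loop, refresh, and try again.
    raise RuntimeError('inventory update kept hitting generation conflicts')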
[ 694.864272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.864272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.864272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.864272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.864404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.865021] env[68233]: INFO nova.compute.manager [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Terminating instance [ 694.907460] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f50c8b-2b86-482a-be3e-dcd319da5aa1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.915057] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5603703c-5b28-4500-b124-23fc794e2bd7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.948532] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c6cf3c-45f8-4e0e-a5bb-f0436bd91c27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.958801] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9d9f81a3-494a-42c9-9922-cf83b43e458d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.973739] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 695.032224] env[68233]: INFO nova.compute.manager [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] instance snapshotting [ 695.037153] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ea5e12-41af-476c-96e4-12fed3f52285 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.058038] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fff7f42-2d7d-4ee6-8cab-121886465f0e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.091442] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782026, 'name': ReconfigVM_Task, 'duration_secs': 0.120111} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.091812] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559266', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'name': 'volume-9bb63a6b-3e52-4693-a250-876762d38f26', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c9b701e-6461-45e3-8654-3291c5a487b9', 'attached_at': '', 'detached_at': '', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'serial': '9bb63a6b-3e52-4693-a250-876762d38f26'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 695.092490] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2917a73-7b58-446d-9ceb-c0097c873513 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.100979] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 695.100979] env[68233]: value = "task-2782027" [ 695.100979] env[68233]: _type = "Task" [ 695.100979] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.115637] env[68233]: DEBUG nova.network.neutron [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.116822] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782027, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.314944] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.368968] env[68233]: DEBUG nova.compute.manager [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.369218] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.370199] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0706c0ba-c740-4a1b-8103-4c1b7b044819 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.378606] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.378684] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de32c459-6086-4b9e-aa91-159fb09504e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.386081] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 695.386081] env[68233]: value = "task-2782028" [ 695.386081] env[68233]: _type = "Task" [ 695.386081] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.394658] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.467371] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.467668] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.498481] env[68233]: ERROR nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [req-8e660ca0-8b53-4597-a784-a8e3447838a2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8e660ca0-8b53-4597-a784-a8e3447838a2"}]} [ 695.518265] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 695.532889] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 695.533096] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 695.545478] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 695.563566] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 695.570202] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 695.570202] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-95680a43-97db-475e-b522-34bdb04ea735 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.575644] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 695.575644] env[68233]: value = "task-2782029" [ 695.575644] env[68233]: _type = "Task" [ 695.575644] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.584228] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782029, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.612334] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782027, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.619947] env[68233]: INFO nova.compute.manager [-] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Took 1.27 seconds to deallocate network for instance. 
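The 409 from Placement above ("placement.concurrent_update") means the report client's cached resource provider generation went stale while another request updated provider 51aa13e7-0977-4031-b209-4ae90c83752c; the compute service reacts by refreshing inventories, aggregates and traits and then re-sending the inventory. A minimal sketch of that refresh-and-retry pattern against the Placement HTTP API follows; the endpoint URL, token and retry count are illustrative assumptions, not Nova's actual report client code.

import requests

PLACEMENT = "http://placement.example.test/placement"   # illustrative endpoint (assumption)
HEADERS = {
    "x-auth-token": "ADMIN_TOKEN",                       # assumed credentials
    "OpenStack-API-Version": "placement 1.28",
}

def set_inventory_with_retry(rp_uuid, inventories, retries=3):
    # PUT the inventory; on a 409 generation conflict, re-read the provider
    # generation and try again, roughly the behaviour shown in the log above.
    for _ in range(retries):
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        payload = {
            "resource_provider_generation": rp["generation"],
            "inventories": inventories,
        }
        resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the generation,
        # so loop around and retry with the fresh generation.
    raise RuntimeError(f"inventory update for {rp_uuid} kept hitting generation conflicts")

The inventories argument would carry the same VCPU / MEMORY_MB / DISK_GB structure seen in the log lines above.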
[ 695.895304] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782028, 'name': PowerOffVM_Task, 'duration_secs': 0.183998} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.897880] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 695.898161] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 695.898542] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ae30b8c-46c2-4d58-be00-0f7f1b7728d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.968922] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 695.969165] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 695.969349] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleting the datastore file [datastore1] 0f7d80d2-5c34-42f7-a14a-97f9625675a8 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 695.972755] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-251f55fb-8f19-4731-ac6f-f583fd3f25c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.981150] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for the task: (returnval){ [ 695.981150] env[68233]: value = "task-2782031" [ 695.981150] env[68233]: _type = "Task" [ 695.981150] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.991237] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782031, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.018170] env[68233]: DEBUG nova.compute.manager [req-9e1b2e44-0649-4905-908c-9984c462adb1 req-13bd735b-d6ef-4c56-98d1-31f541b1da17 service nova] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Received event network-vif-deleted-489605bb-d528-4c3f-a258-3e2ff5d49913 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 696.086198] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782029, 'name': CreateSnapshot_Task, 'duration_secs': 0.475822} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.089195] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 696.090354] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c8b3cf-7a58-4673-8f91-6b7091bb8cb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.102249] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfe7f77-50bf-4ff0-ac76-d4c12c46b9ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.115730] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb64e65-cdd7-4366-bb52-c281b6f4fd22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.119434] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782027, 'name': Rename_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.147860] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.149490] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0a838f-1b6a-4056-9d80-e164e6f2c3ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.157721] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1139eacc-5508-4a53-82af-5c2ad6a390e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.173022] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.491516] env[68233]: DEBUG oslo_vmware.api [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Task: {'id': task-2782031, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138329} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.492397] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 696.492479] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 696.492601] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 696.492775] env[68233]: INFO nova.compute.manager [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 696.493028] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 696.493334] env[68233]: DEBUG nova.compute.manager [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 696.493334] env[68233]: DEBUG nova.network.neutron [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 696.612473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 696.612841] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9b21b79d-df13-4dd7-b54a-4321f7720fa3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.623736] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782027, 'name': Rename_Task, 'duration_secs': 1.15014} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.624895] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 696.625211] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 696.625211] env[68233]: value = "task-2782032" [ 696.625211] env[68233]: _type = "Task" [ 696.625211] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.625390] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c55221ec-0d96-4c01-bae9-510cc77309d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.634427] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782032, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.635524] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 696.635524] env[68233]: value = "task-2782033" [ 696.635524] env[68233]: _type = "Task" [ 696.635524] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.643233] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782033, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.709045] env[68233]: DEBUG nova.scheduler.client.report [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 696.709390] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 60 to 61 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 696.709567] env[68233]: DEBUG nova.compute.provider_tree [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.138219] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782032, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.145772] env[68233]: DEBUG oslo_vmware.api [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782033, 'name': PowerOnVM_Task, 'duration_secs': 0.463438} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.145994] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.146261] env[68233]: INFO nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Took 7.69 seconds to spawn the instance on the hypervisor. 
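Each "Waiting for the task ... to complete" / "progress is N%" pair above comes from oslo.vmware polling the vCenter task object (PowerOffVM_Task, CreateSnapshot_Task, Rename_Task, CloneVM_Task, PowerOnVM_Task) until it reaches a terminal state. A rough, self-contained analogue of that polling loop is sketched below; get_task_info is a hypothetical callable standing in for the PropertyCollector read of TaskInfo, and the interval and timeout values are assumptions rather than oslo.vmware defaults.

import time

class TaskFailed(Exception):
    pass

def wait_for_task(task_ref, get_task_info, interval=0.5, timeout=300):
    # Poll a vCenter task until it finishes, mimicking the "progress is N%"
    # DEBUG lines emitted by the _poll_task loop above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)   # hypothetical helper: reads TaskInfo from vCenter
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise TaskFailed(info.error)
        # 'queued' or 'running': report progress and poll again.
        print(f"Task {task_ref}: {info.state}, progress {getattr(info, 'progress', 0) or 0}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")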
[ 697.146440] env[68233]: DEBUG nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.147255] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5edce-5d12-4c50-87bd-f76119954c99 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.215796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.710s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.216372] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 697.219845] env[68233]: DEBUG nova.network.neutron [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.221105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.880s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.221394] env[68233]: DEBUG nova.objects.instance [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'resources' on Instance uuid 19a1441d-9621-4e6e-ac38-8ad08206facf {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 697.636713] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782032, 'name': CloneVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.663041] env[68233]: INFO nova.compute.manager [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Took 44.05 seconds to build instance. 
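The lockutils lines record two durations for the "compute_resources" lock: how long a caller waited to acquire it (34.880s for the ImagesTestJSON request above) and how long it was then held (6.710s for the instance claim). A simplified analogue of that instrumentation using a plain threading lock is sketched below; the logger name and the in-process lock registry are assumptions, not the oslo.concurrency implementation.

import contextlib
import logging
import threading
import time

LOG = logging.getLogger("lock_timing")    # assumed logger name
_LOCKS = {}                               # assumed in-process lock registry, keyed by name

@contextlib.contextmanager
def timed_lock(name, caller):
    # Log the same three phases seen above: acquiring, acquired (with wait time),
    # released (with hold time).
    lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    started = time.monotonic()
    with lock:
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, time.monotonic() - started)
        held_from = time.monotonic()
        try:
            yield
        finally:
            LOG.debug('Lock "%s" released by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - held_from)

# e.g. with timed_lock("compute_resources", "ResourceTracker.update_usage"): ...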
[ 697.722476] env[68233]: DEBUG nova.compute.utils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 697.724528] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 697.724717] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 697.726687] env[68233]: INFO nova.compute.manager [-] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Took 1.23 seconds to deallocate network for instance. [ 697.786471] env[68233]: DEBUG nova.policy [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ed176bda3e54b87b7aabdefd3e01f4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c09004b4e0924f84a7362b2bef420a85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 698.049023] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully created port: fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.065383] env[68233]: DEBUG nova.compute.manager [req-cc0c3910-69bd-4f3b-b87f-ca653e9393cd req-95aadca4-043d-4734-9129-c7610cd24736 service nova] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Received event network-vif-deleted-436ed418-46de-465b-920d-6fddf7ec041d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 698.139355] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782032, 'name': CloneVM_Task, 'duration_secs': 1.043786} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.142079] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Created linked-clone VM from snapshot [ 698.143031] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fe085c-30b6-4e5c-b658-5154dda5d5d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.150378] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Uploading image d029961d-9502-4d1e-9613-6dbc997a01cb {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 698.164677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b41183f3-21ce-48b9-aaba-f12cf12a3e41 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.742s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.188367] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 698.188367] env[68233]: value = "vm-559318" [ 698.188367] env[68233]: _type = "VirtualMachine" [ 698.188367] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 698.188785] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-622af738-91f4-40e3-b249-7427c2c4fa70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.198953] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lease: (returnval){ [ 698.198953] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fcc2-c1ff-2efe-bcb3-8f0761dbe330" [ 698.198953] env[68233]: _type = "HttpNfcLease" [ 698.198953] env[68233]: } obtained for exporting VM: (result){ [ 698.198953] env[68233]: value = "vm-559318" [ 698.198953] env[68233]: _type = "VirtualMachine" [ 698.198953] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 698.199288] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the lease: (returnval){ [ 698.199288] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fcc2-c1ff-2efe-bcb3-8f0761dbe330" [ 698.199288] env[68233]: _type = "HttpNfcLease" [ 698.199288] env[68233]: } to be ready. 
{{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 698.206643] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 698.206643] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fcc2-c1ff-2efe-bcb3-8f0761dbe330" [ 698.206643] env[68233]: _type = "HttpNfcLease" [ 698.206643] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 698.209633] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c40564-44d5-4fb4-8b21-2203bfd83b8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.216421] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f8388c-d476-47ea-87dc-406a9504b188 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.247779] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 698.252083] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.253075] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b22cc6c0-3239-4c29-a9c4-61a56510b16b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.260481] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d609dde5-00a8-4cb2-af69-5ef8f2eca039 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.274379] env[68233]: DEBUG nova.compute.provider_tree [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 698.349326] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully created port: c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.603109] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully created port: 554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 698.667692] env[68233]: DEBUG 
nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.711657] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 698.711657] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fcc2-c1ff-2efe-bcb3-8f0761dbe330" [ 698.711657] env[68233]: _type = "HttpNfcLease" [ 698.711657] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 698.711958] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 698.711958] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fcc2-c1ff-2efe-bcb3-8f0761dbe330" [ 698.711958] env[68233]: _type = "HttpNfcLease" [ 698.711958] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 698.712937] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9821858f-32c6-4ed5-8354-35d4a041016f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.724457] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 698.726178] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk for reading. 
{{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 698.800786] env[68233]: DEBUG nova.scheduler.client.report [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.833209] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-66c02017-4140-4fb6-992d-5217fd5d2fd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.177874] env[68233]: DEBUG nova.compute.manager [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Received event network-changed-1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 699.177874] env[68233]: DEBUG nova.compute.manager [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Refreshing instance network info cache due to event network-changed-1ac399a3-6f36-48cc-8104-c828a414b1b0. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 699.177874] env[68233]: DEBUG oslo_concurrency.lockutils [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] Acquiring lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.177874] env[68233]: DEBUG oslo_concurrency.lockutils [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] Acquired lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.177874] env[68233]: DEBUG nova.network.neutron [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Refreshing network info cache for port 1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.200012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.306675] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 699.309394] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.088s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.312486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.170s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.315388] env[68233]: INFO nova.compute.claims [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 699.344835] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 699.346951] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 699.347208] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 699.347413] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 699.347561] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 699.348031] env[68233]: DEBUG 
nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 699.348378] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 699.348555] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 699.348736] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 699.348906] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 699.349100] env[68233]: DEBUG nova.virt.hardware [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 699.350061] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71661d35-30af-44dc-a748-69e61804f779 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.357088] env[68233]: INFO nova.scheduler.client.report [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted allocations for instance 19a1441d-9621-4e6e-ac38-8ad08206facf [ 699.367558] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bdff11-33a8-48fe-b2cf-225b0b9f8cae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.871120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c884ccbc-4945-4f2d-82ab-2c2914838a45 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "19a1441d-9621-4e6e-ac38-8ad08206facf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.853s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.948815] env[68233]: DEBUG nova.network.neutron [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updated VIF entry in instance 
network info cache for port 1ac399a3-6f36-48cc-8104-c828a414b1b0. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 699.949302] env[68233]: DEBUG nova.network.neutron [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updating instance_info_cache with network_info: [{"id": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "address": "fa:16:3e:ee:89:df", "network": {"id": "23b82ec7-3a62-44d1-9584-751137707d0d", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1001397050-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "526492ca2e3440dbbe9e7027588f5a0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ac399a3-6f", "ovs_interfaceid": "1ac399a3-6f36-48cc-8104-c828a414b1b0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.097308] env[68233]: DEBUG nova.compute.manager [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-plugged-fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 700.097764] env[68233]: DEBUG oslo_concurrency.lockutils [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.098034] env[68233]: DEBUG oslo_concurrency.lockutils [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.098310] env[68233]: DEBUG oslo_concurrency.lockutils [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.098566] env[68233]: DEBUG nova.compute.manager [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] No waiting events found 
dispatching network-vif-plugged-fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 700.098796] env[68233]: WARNING nova.compute.manager [req-cde272a3-682a-41dc-81f1-bfcd0e096be3 req-5835bd40-23b8-44e6-9e39-0105f26f8b83 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received unexpected event network-vif-plugged-fed32956-586c-44c1-adff-5d2b750f410c for instance with vm_state building and task_state spawning. [ 700.166957] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully updated port: fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 700.452995] env[68233]: DEBUG oslo_concurrency.lockutils [req-29a5dbd2-1b5e-4148-ab37-96a88eca3b89 req-a8d0f895-2c57-4f0f-ab47-ed2660d5248f service nova] Releasing lock "refresh_cache-3c9b701e-6461-45e3-8654-3291c5a487b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.886123] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399a425c-1e32-464d-9cd9-2f738bb28edb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.895443] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6854c023-2edf-4d00-bc48-4cfe69050c25 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.930934] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93466b19-47e1-43d2-a6d2-51275f103eb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.942405] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a2e1a2-aa82-455f-8a47-68a283716483 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.959385] env[68233]: DEBUG nova.compute.provider_tree [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.462968] env[68233]: DEBUG nova.scheduler.client.report [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 701.968431] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 
tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.969152] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 701.972540] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.610s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.974494] env[68233]: INFO nova.compute.claims [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 702.124825] env[68233]: DEBUG nova.compute.manager [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-changed-fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 702.125120] env[68233]: DEBUG nova.compute.manager [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing instance network info cache due to event network-changed-fed32956-586c-44c1-adff-5d2b750f410c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 702.125433] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Acquiring lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.125631] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Acquired lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.125857] env[68233]: DEBUG nova.network.neutron [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing network info cache for port fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 702.196919] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully updated port: c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 702.480502] env[68233]: DEBUG nova.compute.utils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 702.484912] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 702.484912] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 702.529027] env[68233]: DEBUG nova.policy [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '593e6531c1574bf1ac0e81c5693e24f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e8ffd47b7024dbd9138d2d6963e1eb4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 702.683433] env[68233]: DEBUG nova.network.neutron [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 702.825368] env[68233]: DEBUG nova.network.neutron [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.932075] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Successfully created port: 84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.988025] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 703.328811] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Releasing lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.329169] env[68233]: DEBUG nova.compute.manager [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-plugged-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 703.329441] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.329679] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.329941] env[68233]: DEBUG oslo_concurrency.lockutils [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.330178] env[68233]: DEBUG nova.compute.manager [req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] No waiting events found dispatching network-vif-plugged-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 703.330368] env[68233]: WARNING nova.compute.manager 
[req-c3a41540-bc9c-44ec-b14d-24ec35d00258 req-f4bb35e8-a19b-40ca-b63e-b78b165d5ae5 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received unexpected event network-vif-plugged-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e for instance with vm_state building and task_state spawning. [ 703.499823] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b619798-debd-4a39-a9ca-dfa2a7ac72b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.508342] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac04bf5-9954-4223-888a-ecaec73ebbb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.540646] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dfed31-9349-41a2-90e8-12adfbac9719 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.548885] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd179cd-9e05-4cc6-a71b-f0decf158143 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.563926] env[68233]: DEBUG nova.compute.provider_tree [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.998556] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 704.020679] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 704.020931] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.021099] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 704.021290] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.021437] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 704.021581] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 704.021806] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 704.022111] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 704.022442] env[68233]: DEBUG nova.virt.hardware [None 
req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 704.022646] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 704.022914] env[68233]: DEBUG nova.virt.hardware [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 704.023849] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a02b37f-731b-4122-9641-1bf2a640bcb8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.032297] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0e1700-b83f-42c4-8766-122e0a9c8051 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.066103] env[68233]: DEBUG nova.scheduler.client.report [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 704.172157] env[68233]: DEBUG nova.compute.manager [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-changed-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 704.172751] env[68233]: DEBUG nova.compute.manager [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing instance network info cache due to event network-changed-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 704.173150] env[68233]: DEBUG oslo_concurrency.lockutils [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] Acquiring lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.173150] env[68233]: DEBUG oslo_concurrency.lockutils [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] Acquired lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.173372] env[68233]: DEBUG nova.network.neutron [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing network info cache for port c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 704.300893] env[68233]: DEBUG nova.compute.manager [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-plugged-554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 704.300893] env[68233]: DEBUG oslo_concurrency.lockutils [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.300893] env[68233]: DEBUG oslo_concurrency.lockutils [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.300893] env[68233]: DEBUG oslo_concurrency.lockutils [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.300893] env[68233]: DEBUG nova.compute.manager [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] No waiting events found dispatching network-vif-plugged-554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 704.301367] env[68233]: WARNING nova.compute.manager [req-fc70b468-39c7-4e3a-b2b7-46dbabde8d55 req-b4c3d415-092d-4a29-83a8-87dea8ec6109 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received unexpected event network-vif-plugged-554b0c26-9841-4611-927e-bc01c9633734 for instance with vm_state building and task_state spawning. 
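The "compute_resources" lock entries in the surrounding records (e.g. acquired after "waited 32.610s", then "released ... held 2.656s" / "held 2.599s") show the resource tracker serializing instance claims behind one named lock. Below is a minimal, self-contained sketch of that locking pattern, assuming oslo.concurrency is installed; the function name claim_resources_for and the free/requested dicts are hypothetical illustrations, not Nova's actual ResourceTracker code.

    import time
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources_for(instance_uuid, free, requested):
        # Hypothetical example: free/requested are dicts such as
        # {'vcpus': 1, 'memory_mb': 192}; real Nova tracks much more state.
        if any(requested[k] > free.get(k, 0) for k in requested):
            raise RuntimeError('claim failed for %s' % instance_uuid)
        for key, amount in requested.items():
            free[key] -= amount
        time.sleep(0.1)  # stand-in for the work done while the lock is held
        return free

A second request that reaches this function while another holds the lock simply blocks until release, which is what the long "waited" times versus the short "held" times in these log entries correspond to.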
[ 704.416933] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Successfully updated port: 554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 704.447564] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Successfully updated port: 84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 704.574041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.599s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.574041] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 704.574934] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.388s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.576529] env[68233]: INFO nova.compute.claims [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 704.715273] env[68233]: DEBUG nova.network.neutron [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.794531] env[68233]: DEBUG nova.network.neutron [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.919709] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.951341] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.951434] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 704.951606] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.085826] env[68233]: DEBUG nova.compute.utils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 705.087926] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 705.088029] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 705.146342] env[68233]: DEBUG nova.policy [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3468a1b9d4d4803b2a0aa2d5f14d2d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b8fc190f2d84e2baab337b6b03d5eac', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 705.297027] env[68233]: DEBUG oslo_concurrency.lockutils [req-04077084-f7ed-45fb-b3e1-153f0dcb4f61 req-4143ad8b-aea3-4e30-87f9-7d1ad94c9395 service nova] Releasing lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.297665] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.297920] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 705.497209] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Successfully created port: 9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.499835] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 705.592439] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 705.686933] env[68233]: DEBUG nova.network.neutron [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updating instance_info_cache with network_info: [{"id": "84572635-d33f-44cd-8a87-a9af1019bf50", "address": "fa:16:3e:e6:c3:88", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84572635-d3", "ovs_interfaceid": "84572635-d33f-44cd-8a87-a9af1019bf50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.839026] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 706.083168] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bad8d66-a17f-4d8c-a8db-186ebc2581ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.090517] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3085f9-746a-4e13-b295-d95c6508e488 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.129187] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6646176-71e1-4dc0-a804-a3d9c49baaf3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.137225] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d06e57a-5107-4074-94af-60bd58dd945c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.151454] env[68233]: DEBUG nova.compute.provider_tree [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 706.192034] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.192415] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Instance network_info: |[{"id": "84572635-d33f-44cd-8a87-a9af1019bf50", "address": "fa:16:3e:e6:c3:88", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84572635-d3", "ovs_interfaceid": "84572635-d33f-44cd-8a87-a9af1019bf50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 706.192875] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:c3:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84572635-d33f-44cd-8a87-a9af1019bf50', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 706.200691] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Creating folder: Project (0e8ffd47b7024dbd9138d2d6963e1eb4). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.203760] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c9b1670-c4c3-4482-92f3-1179816e9cbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.215102] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Created folder: Project (0e8ffd47b7024dbd9138d2d6963e1eb4) in parent group-v559223. [ 706.215316] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Creating folder: Instances. Parent ref: group-v559319. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 706.215559] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d033f3b8-2e8d-4c86-aed8-a2367345683c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.225373] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Created folder: Instances in parent group-v559319. [ 706.225639] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 706.225796] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 706.226016] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34fdf7cf-359c-46fe-b9cd-32701b6969e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.245942] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 706.245942] env[68233]: value = "task-2782037" [ 706.245942] env[68233]: _type = "Task" [ 706.245942] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.253806] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782037, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.299626] env[68233]: DEBUG nova.compute.manager [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received event network-vif-plugged-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 706.299842] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Acquiring lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.300125] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.301881] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.301881] env[68233]: DEBUG nova.compute.manager [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] No waiting events found dispatching network-vif-plugged-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 706.301881] env[68233]: WARNING nova.compute.manager [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received unexpected event network-vif-plugged-84572635-d33f-44cd-8a87-a9af1019bf50 for instance with vm_state building and task_state spawning. [ 706.301881] env[68233]: DEBUG nova.compute.manager [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 706.301881] env[68233]: DEBUG nova.compute.manager [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing instance network info cache due to event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 706.302486] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Acquiring lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.302486] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Acquired lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.302486] env[68233]: DEBUG nova.network.neutron [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.428298] env[68233]: DEBUG nova.compute.manager [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-changed-554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 706.428551] env[68233]: DEBUG nova.compute.manager [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing instance network info cache due to event network-changed-554b0c26-9841-4611-927e-bc01c9633734. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 706.428684] env[68233]: DEBUG oslo_concurrency.lockutils [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] Acquiring lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.631018] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 706.638163] env[68233]: DEBUG nova.network.neutron [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updating instance_info_cache with network_info: [{"id": "fed32956-586c-44c1-adff-5d2b750f410c", "address": "fa:16:3e:dc:f3:30", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfed32956-58", "ovs_interfaceid": "fed32956-586c-44c1-adff-5d2b750f410c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "address": "fa:16:3e:4a:57:0d", "network": {"id": "bc9843a8-0b02-4acf-aa28-81b7761a95b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117391937", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ec87e9-21", "ovs_interfaceid": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "554b0c26-9841-4611-927e-bc01c9633734", "address": "fa:16:3e:a8:a6:bd", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": 
"nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554b0c26-98", "ovs_interfaceid": "554b0c26-9841-4611-927e-bc01c9633734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.654881] env[68233]: DEBUG nova.scheduler.client.report [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 706.660653] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 706.660881] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 706.661068] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 706.661258] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 706.661402] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 706.661548] env[68233]: DEBUG nova.virt.hardware 
[None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 706.661754] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 706.661912] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 706.662091] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 706.662261] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 706.662468] env[68233]: DEBUG nova.virt.hardware [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 706.663657] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6460fe9d-c098-4641-b371-e1c9b9d58a80 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.673405] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2db830e-76a5-4b8e-bcc5-881af6e51f46 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.755370] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782037, 'name': CreateVM_Task, 'duration_secs': 0.462153} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.755507] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 706.756186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.756352] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.756685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 706.756924] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90726a6a-3fdc-4a54-9406-755a82939f4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.761246] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 706.761246] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525c8de5-ec54-9dd5-ac34-d4d072cc1f9f" [ 706.761246] env[68233]: _type = "Task" [ 706.761246] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.768828] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525c8de5-ec54-9dd5-ac34-d4d072cc1f9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.988667] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Successfully updated port: 9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 707.004018] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 707.004018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df64480d-cebd-4d4a-a3eb-ae2d5fdd700d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.009966] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 707.010287] env[68233]: ERROR oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk due to incomplete transfer. [ 707.010710] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9073f8ee-3df4-4b7a-9a27-56de65b3e694 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.017267] env[68233]: DEBUG oslo_vmware.rw_handles [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522329a8-050b-23d7-18fa-93e671d7cce7/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 707.017473] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Uploaded image d029961d-9502-4d1e-9613-6dbc997a01cb to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 707.019687] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 707.019951] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-16162b2e-dbde-4049-93c2-941538f11168 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.025684] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 707.025684] env[68233]: value = "task-2782038" [ 707.025684] env[68233]: _type = "Task" [ 707.025684] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.033457] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782038, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.041276] env[68233]: DEBUG nova.network.neutron [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updated VIF entry in instance network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.041647] env[68233]: DEBUG nova.network.neutron [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updating instance_info_cache with network_info: [{"id": "84572635-d33f-44cd-8a87-a9af1019bf50", "address": "fa:16:3e:e6:c3:88", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84572635-d3", "ovs_interfaceid": "84572635-d33f-44cd-8a87-a9af1019bf50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.140955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.141447] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance network_info: |[{"id": "fed32956-586c-44c1-adff-5d2b750f410c", "address": "fa:16:3e:dc:f3:30", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfed32956-58", "ovs_interfaceid": "fed32956-586c-44c1-adff-5d2b750f410c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "address": "fa:16:3e:4a:57:0d", "network": {"id": "bc9843a8-0b02-4acf-aa28-81b7761a95b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117391937", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ec87e9-21", "ovs_interfaceid": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "554b0c26-9841-4611-927e-bc01c9633734", "address": "fa:16:3e:a8:a6:bd", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554b0c26-98", "ovs_interfaceid": "554b0c26-9841-4611-927e-bc01c9633734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 707.141759] env[68233]: DEBUG oslo_concurrency.lockutils [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] Acquired lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.141941] env[68233]: DEBUG nova.network.neutron [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] [instance: 
080ab438-269b-427a-9ee9-71c59d9c2a91] Refreshing network info cache for port 554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.143184] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:f3:30', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fed32956-586c-44c1-adff-5d2b750f410c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:57:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:a6:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '554b0c26-9841-4611-927e-bc01c9633734', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 707.155601] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Creating folder: Project (c09004b4e0924f84a7362b2bef420a85). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.158965] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-099db86b-a5de-4dc2-8da8-8084e68bdda5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.167463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.592s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.168071] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 707.171902] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.062s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.173782] env[68233]: INFO nova.compute.claims [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 707.176494] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Created folder: Project (c09004b4e0924f84a7362b2bef420a85) in parent group-v559223. [ 707.176664] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Creating folder: Instances. Parent ref: group-v559322. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 707.177192] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5aac59f8-f2c2-4add-929f-d2b2be54f7e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.186215] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Created folder: Instances in parent group-v559322. [ 707.186446] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 707.186641] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 707.186840] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a58a89e9-d2a3-4fc0-ab19-d569dc63c112 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.213432] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 707.213432] env[68233]: value = "task-2782041" [ 707.213432] env[68233]: _type = "Task" [ 707.213432] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.221404] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782041, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.271237] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525c8de5-ec54-9dd5-ac34-d4d072cc1f9f, 'name': SearchDatastore_Task, 'duration_secs': 0.010827} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.273830] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.274089] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.274330] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.274474] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.274649] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.274919] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-03dce175-77aa-47a5-a76e-3c970a0da50c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.283380] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.283561] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.284303] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d67e3cd6-e7fb-472e-b5a3-5ece6882f585 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.289388] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 707.289388] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52996a60-b9a5-e582-978d-916407eff835" [ 707.289388] env[68233]: _type = "Task" [ 707.289388] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.297114] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52996a60-b9a5-e582-978d-916407eff835, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.494317] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.494472] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.494632] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 707.500615] env[68233]: DEBUG nova.network.neutron [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updated VIF entry in instance network info cache for port 554b0c26-9841-4611-927e-bc01c9633734. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.501129] env[68233]: DEBUG nova.network.neutron [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updating instance_info_cache with network_info: [{"id": "fed32956-586c-44c1-adff-5d2b750f410c", "address": "fa:16:3e:dc:f3:30", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.173", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfed32956-58", "ovs_interfaceid": "fed32956-586c-44c1-adff-5d2b750f410c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "address": "fa:16:3e:4a:57:0d", "network": {"id": "bc9843a8-0b02-4acf-aa28-81b7761a95b8", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-117391937", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.113", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8ec87e9-21", "ovs_interfaceid": "c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "554b0c26-9841-4611-927e-bc01c9633734", "address": "fa:16:3e:a8:a6:bd", "network": {"id": "72984a78-0bdd-4376-889d-8c25b4cc19fa", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-2104826884", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", 
"segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap554b0c26-98", "ovs_interfaceid": "554b0c26-9841-4611-927e-bc01c9633734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.536540] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782038, 'name': Destroy_Task, 'duration_secs': 0.331329} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.536816] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Destroyed the VM [ 707.537056] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 707.537320] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e74fac6e-2daf-4f14-b5f0-20171c66cdac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.543703] env[68233]: DEBUG oslo_concurrency.lockutils [req-15d4bd56-e09e-4129-9652-e1c48e8c87f6 req-d829ea6b-caf1-4cd2-a7a5-8e8d15781ce4 service nova] Releasing lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.544819] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 707.544819] env[68233]: value = "task-2782042" [ 707.544819] env[68233]: _type = "Task" [ 707.544819] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.552927] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782042, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.680816] env[68233]: DEBUG nova.compute.utils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 707.683363] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 707.683547] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.725299] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782041, 'name': CreateVM_Task, 'duration_secs': 0.464421} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.726029] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.726382] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.726547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.727207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.727207] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad76fe08-3c61-400a-bc89-2a8d43b663d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.731483] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 707.731483] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f3941-5377-621a-0071-2a6092fbd851" [ 707.731483] env[68233]: _type = "Task" [ 707.731483] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.739293] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f3941-5377-621a-0071-2a6092fbd851, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.770515] env[68233]: DEBUG nova.policy [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 707.800640] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52996a60-b9a5-e582-978d-916407eff835, 'name': SearchDatastore_Task, 'duration_secs': 0.008421} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.801241] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-753582b9-a050-46df-884f-683fd6c4cacc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.806054] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 707.806054] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ac7ab-e008-db5b-fd48-d7078a7364b5" [ 707.806054] env[68233]: _type = "Task" [ 707.806054] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.815017] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ac7ab-e008-db5b-fd48-d7078a7364b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.005668] env[68233]: DEBUG oslo_concurrency.lockutils [req-7dbda488-a65f-4afc-aa85-21a42cd1469e req-74eb76eb-9d21-4759-96f5-4bcb032ea13c service nova] Releasing lock "refresh_cache-080ab438-269b-427a-9ee9-71c59d9c2a91" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.041304] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 708.058765] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782042, 'name': RemoveSnapshot_Task} progress is 50%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.172722] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Successfully created port: fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.187018] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 708.195321] env[68233]: DEBUG nova.network.neutron [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Updating instance_info_cache with network_info: [{"id": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "address": "fa:16:3e:b3:dd:9e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9fa3f8-4a", "ovs_interfaceid": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.246136] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f3941-5377-621a-0071-2a6092fbd851, 'name': SearchDatastore_Task, 'duration_secs': 0.008762} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.246464] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.246695] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 708.246898] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.318308] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529ac7ab-e008-db5b-fd48-d7078a7364b5, 'name': SearchDatastore_Task, 'duration_secs': 0.009206} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.318308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.318532] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f2af60e6-496c-4edb-9e99-4b45fa94bfeb/f2af60e6-496c-4edb-9e99-4b45fa94bfeb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.319121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.319121] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} 
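The SearchDatastore_Task, CreateVM_Task and CopyVirtualDisk_Task entries above and below all follow the same poll-until-complete pattern: a vCenter task is invoked, then its state is re-read and progress is logged until it reports success, at which point the duration is recorded. The following is a minimal, self-contained sketch of that pattern only; VSphereTask and this wait_for_task are illustrative stand-ins and not the oslo.vmware or pyVmomi API.

    import time


    class VSphereTask:
        """Hypothetical task handle carrying the fields the log prints."""

        def __init__(self, task_id, name):
            self.id = task_id          # e.g. 'task-2782043'
            self.name = name           # e.g. 'CopyVirtualDisk_Task'
            self.progress = 0          # 0..100, as in "progress is 0%"
            self.state = 'running'     # 'running' | 'success' | 'error'

        def refresh(self):
            # A real client would re-read TaskInfo from vCenter here;
            # this stub just advances the task so the example terminates.
            self.progress = min(100, self.progress + 50)
            if self.progress == 100:
                self.state = 'success'


    def wait_for_task(task, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the DEBUG lines above."""
        start = time.monotonic()
        while True:
            task.refresh()
            print("Task: {'id': %r, 'name': %r} progress is %d%%."
                  % (task.id, task.name, task.progress))
            if task.state == 'success':
                duration = time.monotonic() - start
                print("Task: {'id': %r, 'name': %r, 'duration_secs': %.6f} "
                      "completed successfully." % (task.id, task.name, duration))
                return
            if task.state == 'error':
                raise RuntimeError('task %s failed' % task.id)
            time.sleep(poll_interval)


    if __name__ == '__main__':
        wait_for_task(VSphereTask('task-2782043', 'CopyVirtualDisk_Task'))

Run standalone, this prints a progress line and then a completion line with a duration, which is the same shape as the _poll_task output interleaved through this section.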
[ 708.319278] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df8bc967-35be-4b96-adcc-412eb762d8b4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.321520] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3f5c845-9910-4094-b230-f5999fa734e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.330510] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 708.330510] env[68233]: value = "task-2782043" [ 708.330510] env[68233]: _type = "Task" [ 708.330510] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.335779] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.335981] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 708.339606] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9640bd6-d941-4c9f-bf41-7db10bd97a4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.341847] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.347400] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 708.347400] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521fca63-0298-3046-bc86-52614de08acb" [ 708.347400] env[68233]: _type = "Task" [ 708.347400] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.355183] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521fca63-0298-3046-bc86-52614de08acb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.559600] env[68233]: DEBUG oslo_vmware.api [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782042, 'name': RemoveSnapshot_Task, 'duration_secs': 0.551804} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.563110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 708.563694] env[68233]: INFO nova.compute.manager [None req-7ca6f2dd-430f-4cff-a631-ea2054666058 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 13.53 seconds to snapshot the instance on the hypervisor. [ 708.568110] env[68233]: DEBUG nova.compute.manager [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Received event network-vif-plugged-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 708.568157] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Acquiring lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 708.568591] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.568591] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.569510] env[68233]: DEBUG nova.compute.manager [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] No waiting events found dispatching network-vif-plugged-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 708.569510] env[68233]: WARNING nova.compute.manager [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Received unexpected event network-vif-plugged-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 for instance with vm_state building and task_state spawning. 
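The external-event entries just above (Acquiring lock "...-events", "No waiting events found dispatching network-vif-plugged-...", the WARNING about an unexpected event while the instance is still building) reflect a waiter registry: the spawning thread may register interest in a port event, and an incoming Neutron notification either wakes that waiter or, if nobody registered, is logged as unexpected. Below is a small, self-contained sketch of that idea only; InstanceEventRegistry and its method names are hypothetical illustrations, not Nova's actual nova.compute.manager.InstanceEvents implementation.

    import threading


    class InstanceEventRegistry:
        def __init__(self):
            self._lock = threading.Lock()   # plays the role of the "-events" lock
            self._waiters = {}              # {instance_uuid: {event_name: Event}}

        def expect(self, instance_uuid, event_name):
            """Register interest in an event (e.g. before plugging a VIF)."""
            waiter = threading.Event()
            with self._lock:
                self._waiters.setdefault(instance_uuid, {})[event_name] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_name):
            """Return the registered waiter, or None if nobody is waiting."""
            with self._lock:
                return self._waiters.get(instance_uuid, {}).pop(event_name, None)

        def dispatch(self, instance_uuid, event_name):
            """Handle an external event such as network-vif-plugged-<port-id>."""
            waiter = self.pop_instance_event(instance_uuid, event_name)
            if waiter is None:
                # Mirrors the WARNING path in the log entry above.
                print('WARNING: Received unexpected event %s for instance %s'
                      % (event_name, instance_uuid))
                return
            waiter.set()                    # wake the thread waiting on this event


    if __name__ == '__main__':
        registry = InstanceEventRegistry()
        # Nothing was registered, so this follows the "unexpected event" branch.
        registry.dispatch('b056fbf4-4873-4ec9-905a-ad973c8fb27a',
                          'network-vif-plugged-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673')

The lock around the waiter table is what corresponds to the acquire/release pair on the "b056fbf4-...-events" lock in the entries above; the WARNING branch corresponds to the event arriving while the instance is still in vm_state building with no waiter registered.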
[ 708.569510] env[68233]: DEBUG nova.compute.manager [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Received event network-changed-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 708.569510] env[68233]: DEBUG nova.compute.manager [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Refreshing instance network info cache due to event network-changed-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 708.569510] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Acquiring lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 708.696501] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.696829] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Instance network_info: |[{"id": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "address": "fa:16:3e:b3:dd:9e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9fa3f8-4a", "ovs_interfaceid": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 708.697023] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Acquired lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 708.697242] env[68233]: DEBUG nova.network.neutron [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Refreshing network info cache for port 
9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 708.698406] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:dd:9e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 708.707588] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 708.707588] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 708.707588] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6497229-fe6a-4ede-8d0d-f09902c68bf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.731685] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 708.731685] env[68233]: value = "task-2782044" [ 708.731685] env[68233]: _type = "Task" [ 708.731685] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.742585] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782044, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.744395] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ae5ec0-f44d-4c3e-8554-ec06f065d134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.751730] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbc98cb-ab54-4b85-8810-9a32f788e09f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.784392] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe44453-1b3b-444b-aec5-cf630a329226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.791746] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ec3cec-4a3d-4039-b98e-e8c91cc5fd1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.805022] env[68233]: DEBUG nova.compute.provider_tree [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.843359] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.437565} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.843563] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f2af60e6-496c-4edb-9e99-4b45fa94bfeb/f2af60e6-496c-4edb-9e99-4b45fa94bfeb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 708.843774] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 708.844019] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29d6b50c-cde6-4494-b88d-13e4fb35749e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.853139] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 708.853139] env[68233]: value = "task-2782045" [ 708.853139] env[68233]: _type = "Task" [ 708.853139] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.859495] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521fca63-0298-3046-bc86-52614de08acb, 'name': SearchDatastore_Task, 'duration_secs': 0.008857} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.860730] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31621775-65e4-42e3-a762-e651300d0918 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.865759] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782045, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.869017] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 708.869017] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5231c9e2-21e3-d643-a152-236f45b5b5a1" [ 708.869017] env[68233]: _type = "Task" [ 708.869017] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.876769] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5231c9e2-21e3-d643-a152-236f45b5b5a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.197804] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 709.222278] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 709.222579] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.222747] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 709.222929] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.223087] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 709.223240] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 709.223449] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 709.223748] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 709.223748] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] 
Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 709.223912] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 709.224149] env[68233]: DEBUG nova.virt.hardware [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 709.224984] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c3b104-b560-422f-81fb-404322c8358f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.235850] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db39b4e-141c-4557-bc7e-bbc3fd9b428b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.247630] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782044, 'name': CreateVM_Task, 'duration_secs': 0.373805} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.258523] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 709.258523] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.258523] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.258523] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 709.258523] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5e1bdc4-e06a-4322-9afe-08af1cc463e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.263681] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: 
(returnval){ [ 709.263681] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c11b-b22b-b328-cd10-2f330f0b4f8b" [ 709.263681] env[68233]: _type = "Task" [ 709.263681] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.270611] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c11b-b22b-b328-cd10-2f330f0b4f8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.308383] env[68233]: DEBUG nova.scheduler.client.report [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 709.365231] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782045, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067669} completed successfully. 
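The "Inventory has not changed for provider 51aa13e7-…" entry above carries the resource provider's inventory as reported to Placement (min_unit/max_unit/step_size omitted below). In Placement's capacity model the schedulable amount per resource class is (total - reserved) * allocation_ratio; a small sketch using the exact figures from that entry:

```python
# Inventory as logged for provider 51aa13e7-0977-4031-b209-4ae90c83752c.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def schedulable(inv):
    """Effective capacity Placement will allocate against."""
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(schedulable(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```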
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.365496] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.367057] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d93f1a-b80e-4ec4-b978-7f3639f5f1cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.393065] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] f2af60e6-496c-4edb-9e99-4b45fa94bfeb/f2af60e6-496c-4edb-9e99-4b45fa94bfeb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.396781] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60052495-4964-449e-b568-6a1c8859976f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.416812] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5231c9e2-21e3-d643-a152-236f45b5b5a1, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.417441] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.417714] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 080ab438-269b-427a-9ee9-71c59d9c2a91/080ab438-269b-427a-9ee9-71c59d9c2a91.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 709.418028] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a0d0dc8-6ece-41f7-a1e5-0dd87ed50c92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.422568] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 709.422568] env[68233]: value = "task-2782046" [ 709.422568] env[68233]: _type = "Task" [ 709.422568] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.426850] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 709.426850] env[68233]: value = "task-2782047" [ 709.426850] env[68233]: _type = "Task" [ 709.426850] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.434128] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782046, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.438921] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782047, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.452876] env[68233]: DEBUG nova.network.neutron [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Updated VIF entry in instance network info cache for port 9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 709.453353] env[68233]: DEBUG nova.network.neutron [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Updating instance_info_cache with network_info: [{"id": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "address": "fa:16:3e:b3:dd:9e", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.88", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a9fa3f8-4a", "ovs_interfaceid": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.776000] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2c11b-b22b-b328-cd10-2f330f0b4f8b, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. 
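The instance_info_cache entry above stores the Neutron port data as a list of VIF dictionaries. The sketch below pulls the commonly needed fields (device name, MAC, fixed IPs, MTU) out of a structure shaped like the one logged; the dictionary is a trimmed copy of that entry, not a complete VIF.

```python
# Trimmed from the instance_info_cache entry logged above.
network_info = [{
    "id": "9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673",
    "address": "fa:16:3e:b3:dd:9e",
    "devname": "tap9a9fa3f8-4a",
    "network": {
        "label": "shared",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.88", "type": "fixed"}],
        }],
        "meta": {"mtu": 8950},
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips,
          vif["network"]["meta"]["mtu"])
# tap9a9fa3f8-4a fa:16:3e:b3:dd:9e ['192.168.233.88'] 8950
```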
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.776408] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.776977] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 709.776977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 709.777141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 709.777248] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 709.777533] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79b96bb2-b93a-4331-9aff-c2996b694c56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.793067] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 709.793067] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Folder [datastore2] devstack-image-cache_base created. 
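The sequence above — acquire a lock named after the cached image path, check the datastore image cache, and create the devstack-image-cache_base folder if it is missing — is a lock-then-check-then-create pattern that keeps concurrent spawns of the same image from racing. Nova uses oslo.concurrency's lockutils for the lock, as the lockutils lines show; the sketch below illustrates the same pattern with stdlib locks only and is not Nova's implementation. `exists` and `fetch` are hypothetical callables.

```python
import threading

_creation_lock = threading.Lock()
_cache_locks: dict[str, threading.Lock] = {}

def _lock_for(key: str) -> threading.Lock:
    # One lock per cache key, e.g. "[datastore2] devstack-image-cache_base/<image-id>".
    with _creation_lock:
        return _cache_locks.setdefault(key, threading.Lock())

def ensure_cached(cache_key: str, exists, fetch):
    """Serialize callers that need the same base image: the first one
    populates the cache, later callers see the hit and return."""
    with _lock_for(cache_key):
        if exists(cache_key):
            return
        fetch(cache_key)  # e.g. create the cache folder and copy the base VMDK in
```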
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 709.793340] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daaa28d8-2069-4c92-bea8-9ab6fa97cb19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.799901] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 709.799901] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245c01a-88bf-c0c1-c252-933a526d6057" [ 709.799901] env[68233]: _type = "Task" [ 709.799901] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.811453] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245c01a-88bf-c0c1-c252-933a526d6057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.815454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.816027] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 709.818973] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.491s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.820529] env[68233]: INFO nova.compute.claims [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.937822] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782046, 'name': ReconfigVM_Task} progress is 14%. 
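Entries such as Lock "compute_resources" acquired "… waited 29.491s" and "released … held 2.643s" report two separate measurements: how long the caller queued for the lock and how long it then held it. A minimal sketch of a context manager that produces the same two numbers (illustrative, not oslo.concurrency's implementation):

```python
import contextlib
import threading
import time

@contextlib.contextmanager
def timed_lock(lock: threading.Lock, name: str):
    """Report how long we waited to acquire `lock` and how long we held it."""
    t0 = time.monotonic()
    lock.acquire()
    t_acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {t_acquired - t0:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t_acquired
        lock.release()
        print(f'Lock "{name}" released :: held {held:.3f}s')

# Usage sketch:
#   with timed_lock(compute_resources_lock, "compute_resources"):
#       ...claim resources for the instance...
```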
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.943914] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782047, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.455763} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.944643] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 080ab438-269b-427a-9ee9-71c59d9c2a91/080ab438-269b-427a-9ee9-71c59d9c2a91.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.944906] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.945206] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0bdf008-db12-4743-8924-f31b47342f16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.952136] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 709.952136] env[68233]: value = "task-2782048" [ 709.952136] env[68233]: _type = "Task" [ 709.952136] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.956250] env[68233]: DEBUG oslo_concurrency.lockutils [req-64b568cb-6149-49f1-bb37-6319c461aa6e req-8419151f-9171-4bc2-8eb0-b08220e274cc service nova] Releasing lock "refresh_cache-b056fbf4-4873-4ec9-905a-ad973c8fb27a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.962361] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782048, 'name': ExtendVirtualDisk_Task} progress is 0%. 
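The copy/extend pair above is the cache-backed root-disk flow: the sparse base VMDK is copied from devstack-image-cache_base/<image-id>/ into the instance's own datastore folder, then extended to the flavor's root size. "Extending root virtual disk to 1048576" is consistent with m1.nano's root_gb=1 expressed in KiB (1 GiB = 1024 * 1024 KiB). Below is a sketch of the path and size arithmetic only; the helper names are illustrative, not Nova's ds_util/vm_util functions.

```python
def cached_image_path(datastore: str, image_id: str) -> str:
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

def instance_disk_path(datastore: str, instance_uuid: str) -> str:
    return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

def root_disk_kib(root_gb: int) -> int:
    # 1 GiB expressed in KiB; 1 * 1024 * 1024 matches the logged 1048576.
    return root_gb * 1024 * 1024

src = cached_image_path("datastore2", "da133fda-e1e2-42a1-a7e0-b8b1426a8490")
dst = instance_disk_path("datastore2", "080ab438-269b-427a-9ee9-71c59d9c2a91")
print(src, "->", dst)
print(root_disk_kib(1))   # 1048576
```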
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.973117] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "769956c6-7824-41db-9779-fc1b5f53dd94" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.973446] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.974950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.974950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.974950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.976298] env[68233]: INFO nova.compute.manager [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Terminating instance [ 710.223864] env[68233]: DEBUG nova.compute.manager [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf req-596822e1-febf-4870-a6bd-d29420bca098 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Received event network-vif-plugged-fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 710.224237] env[68233]: DEBUG oslo_concurrency.lockutils [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf req-596822e1-febf-4870-a6bd-d29420bca098 service nova] Acquiring lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.224616] env[68233]: DEBUG oslo_concurrency.lockutils [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf 
req-596822e1-febf-4870-a6bd-d29420bca098 service nova] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.224871] env[68233]: DEBUG oslo_concurrency.lockutils [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf req-596822e1-febf-4870-a6bd-d29420bca098 service nova] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.225688] env[68233]: DEBUG nova.compute.manager [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf req-596822e1-febf-4870-a6bd-d29420bca098 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] No waiting events found dispatching network-vif-plugged-fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 710.225688] env[68233]: WARNING nova.compute.manager [req-3ed59755-1bd5-46f2-b66a-6cbe10e09eaf req-596822e1-febf-4870-a6bd-d29420bca098 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Received unexpected event network-vif-plugged-fd3d32d5-4e84-47da-9098-50381834dfef for instance with vm_state building and task_state spawning. [ 710.299893] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Successfully updated port: fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.313012] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245c01a-88bf-c0c1-c252-933a526d6057, 'name': SearchDatastore_Task, 'duration_secs': 0.042156} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.314971] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b3880a9-8d86-47c4-9874-2f9e2337d1d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.323020] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 710.323020] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3669c-5410-b891-75d2-0391fb920d3e" [ 710.323020] env[68233]: _type = "Task" [ 710.323020] env[68233]: } to complete. 
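The network-vif-plugged sequence above shows the external-event handshake with Neutron: the compute manager registers a waiter for the event before it expects the port to come up, and the incoming notification either wakes that waiter or, if none is registered yet, is logged as "Received unexpected event" (the WARNING above, which is benign while the instance is still spawning). A stdlib sketch of that waiter/dispatch idea, not the nova.compute.manager implementation:

```python
import threading

_waiters: dict[tuple[str, str], threading.Event] = {}
_waiters_lock = threading.Lock()

def prepare_for_event(instance_uuid: str, event_name: str) -> threading.Event:
    """Register interest before triggering the action that emits the event."""
    ev = threading.Event()
    with _waiters_lock:
        _waiters[(instance_uuid, event_name)] = ev
    return ev

def dispatch_event(instance_uuid: str, event_name: str) -> None:
    """Called when the external notification (e.g. network-vif-plugged) arrives."""
    with _waiters_lock:
        ev = _waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
        return
    ev.set()

# Typical use: ev = prepare_for_event(uuid, "network-vif-plugged-<port-id>")
# ... plug the VIF ... then ev.wait(timeout=300)
```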
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.326105] env[68233]: DEBUG nova.compute.utils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 710.332188] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 710.332188] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 710.344869] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3669c-5410-b891-75d2-0391fb920d3e, 'name': SearchDatastore_Task, 'duration_secs': 0.009224} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.344869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 710.345271] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] b056fbf4-4873-4ec9-905a-ad973c8fb27a/b056fbf4-4873-4ec9-905a-ad973c8fb27a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 710.345520] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28d91783-7cae-4739-8831-0a787478e47e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.353010] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 710.353010] env[68233]: value = "task-2782049" [ 710.353010] env[68233]: _type = "Task" [ 710.353010] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.362641] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.422556] env[68233]: DEBUG nova.policy [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '29c87b36bb564a1fb5ba3febc6f8c0d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e86ceada21f1436ea2cddc9be5a41864', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 710.432840] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782046, 'name': ReconfigVM_Task, 'duration_secs': 0.569362} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.433101] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Reconfigured VM instance instance-00000020 to attach disk [datastore2] f2af60e6-496c-4edb-9e99-4b45fa94bfeb/f2af60e6-496c-4edb-9e99-4b45fa94bfeb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.433708] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edb62eae-3c41-4ef4-b1b9-92010e8de8e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.444021] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 710.444021] env[68233]: value = "task-2782050" [ 710.444021] env[68233]: _type = "Task" [ 710.444021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.448729] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782050, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.463422] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782048, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063703} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.463686] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 710.468022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f97ebb-f1a0-4435-938d-63624f0c7779 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.487526] env[68233]: DEBUG nova.compute.manager [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 710.487736] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 710.498075] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 080ab438-269b-427a-9ee9-71c59d9c2a91/080ab438-269b-427a-9ee9-71c59d9c2a91.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 710.503245] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3fa74a9-9269-4a86-ab8a-30fb8fac2f7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.505914] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ded30d2b-3627-4063-ae3d-a0d1a189bee8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.526199] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 710.526977] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0533ec96-0bcb-45bc-a922-74b441171d03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.529575] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 710.529575] env[68233]: value = "task-2782051" [ 710.529575] env[68233]: _type = "Task" [ 710.529575] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.536937] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 710.536937] env[68233]: value = "task-2782052" [ 710.536937] env[68233]: _type = "Task" [ 710.536937] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.544961] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.550878] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.805407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.805590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.805871] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 710.831141] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 710.863616] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468696} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.863890] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] b056fbf4-4873-4ec9-905a-ad973c8fb27a/b056fbf4-4873-4ec9-905a-ad973c8fb27a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 710.864256] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 710.865608] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36f109f0-e559-41c6-b4e8-169ee2f4a204 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.874820] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 710.874820] env[68233]: value = "task-2782053" [ 710.874820] env[68233]: _type = "Task" [ 710.874820] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.889564] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782053, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.954123] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782050, 'name': Rename_Task, 'duration_secs': 0.315108} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.954519] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.954825] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e16105f-a2b9-4c42-96a0-12d4faeb392a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.959320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b703a6d1-fcda-46f4-ab85-f3ecb5072fe8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.971389] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08027d38-3d13-4454-a85c-497600c6bab3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.975131] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 710.975131] env[68233]: value = "task-2782054" [ 710.975131] env[68233]: _type = "Task" [ 710.975131] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.005320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fac3687-8412-48fc-8f71-289ed8a8c58e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.012401] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782054, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.017955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d78afc-cdd5-4c2d-8d3c-c8ddf9b13a9c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.022872] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Successfully created port: 5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.034578] env[68233]: DEBUG nova.compute.provider_tree [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.048925] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.051545] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782052, 'name': PowerOffVM_Task, 'duration_secs': 0.316816} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.051889] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 711.052094] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 711.052439] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edcf0f81-cb82-4c4d-9a40-ab0499d634a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.229024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 711.229024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 711.229532] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Deleting the datastore file [datastore2] 769956c6-7824-41db-9779-fc1b5f53dd94 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 711.229532] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-78dec1be-3420-42f0-b0fb-84cba622bae3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.236912] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for the task: (returnval){ [ 711.236912] env[68233]: value = "task-2782056" [ 711.236912] env[68233]: _type = "Task" [ 711.236912] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.246780] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782056, 'name': DeleteDatastoreFile_Task} progress is 0%. 
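The teardown of instance 769956c6-7824-41db-9779-fc1b5f53dd94 above follows a fixed order: power the VM off, unregister it from vCenter (UnregisterVM has no task to poll, unlike the *_Task calls), delete its datastore directory, and only then move on to deallocating the Neutron ports. A compact sketch of that ordering; the `session.*` helpers and `deallocate_network` are hypothetical stand-ins, not the vmwareapi driver API.

```python
def destroy_instance(session, vm_ref, datastore_path, deallocate_network):
    """Tear down a VMware-backed instance in the order seen in the log:
    PowerOffVM -> UnregisterVM -> DeleteDatastoreFile -> network cleanup."""
    if session.is_powered_on(vm_ref):               # hypothetical helper
        session.power_off(vm_ref)                   # PowerOffVM_Task
    session.unregister(vm_ref)                      # UnregisterVM
    session.delete_datastore_file(datastore_path)   # DeleteDatastoreFile_Task
    deallocate_network()                            # release Neutron ports last
```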
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.343126] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.387679] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782053, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.268313} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.389125] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 711.389451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa7d5e3-bec2-45b5-b2d6-38c7e5c3035d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.412629] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] b056fbf4-4873-4ec9-905a-ad973c8fb27a/b056fbf4-4873-4ec9-905a-ad973c8fb27a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.415645] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3a11bbc-1970-4e06-946e-307e9920a4be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.439487] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 711.439487] env[68233]: value = "task-2782057" [ 711.439487] env[68233]: _type = "Task" [ 711.439487] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.446676] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782057, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.484600] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782054, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.547627] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782051, 'name': ReconfigVM_Task, 'duration_secs': 0.773625} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.548146] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 080ab438-269b-427a-9ee9-71c59d9c2a91/080ab438-269b-427a-9ee9-71c59d9c2a91.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 711.550859] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-928e0a37-fb1f-42ca-82d1-a58bfbe95ce3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.556957] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 711.556957] env[68233]: value = "task-2782058" [ 711.556957] env[68233]: _type = "Task" [ 711.556957] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.568777] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782058, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.589193] env[68233]: DEBUG nova.scheduler.client.report [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 711.591780] env[68233]: DEBUG nova.compute.provider_tree [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 61 to 62 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 711.592115] env[68233]: DEBUG nova.compute.provider_tree [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 711.741891] env[68233]: DEBUG nova.network.neutron [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Updating instance_info_cache with network_info: [{"id": "fd3d32d5-4e84-47da-9098-50381834dfef", "address": "fa:16:3e:49:f8:1f", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd3d32d5-4e", "ovs_interfaceid": "fd3d32d5-4e84-47da-9098-50381834dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.749521] env[68233]: DEBUG oslo_vmware.api [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Task: {'id': task-2782056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149357} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.749786] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 711.749965] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 711.750216] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 711.750418] env[68233]: INFO nova.compute.manager [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 1.26 seconds to destroy the instance on the hypervisor. [ 711.750664] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 711.750850] env[68233]: DEBUG nova.compute.manager [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 711.750946] env[68233]: DEBUG nova.network.neutron [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 711.855014] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 711.882230] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 711.882647] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 711.882939] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 711.883275] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 711.883532] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 711.883788] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 711.884150] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 711.884427] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 711.884820] env[68233]: DEBUG nova.virt.hardware [None 
req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 711.884984] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 711.885296] env[68233]: DEBUG nova.virt.hardware [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 711.887026] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0896cbda-d7a1-496b-b4a0-86c60fb8c958 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.896762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509709c4-17e0-4d08-af15-a19bc9b3a730 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.947633] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782057, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.984668] env[68233]: DEBUG oslo_vmware.api [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782054, 'name': PowerOnVM_Task, 'duration_secs': 0.602797} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.984668] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.984668] env[68233]: INFO nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Took 7.99 seconds to spawn the instance on the hypervisor. 
[ 711.985450] env[68233]: DEBUG nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.985616] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e52456-f98a-4d1b-bfb4-242bda7e3736 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.050129] env[68233]: DEBUG nova.compute.manager [req-97b7f93b-3248-4008-a932-e90906681386 req-3f4a4531-3102-4dc1-967c-3d68e367224f service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Received event network-vif-deleted-69a128c9-1103-4a47-9adf-f0e87598c6c4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 712.050439] env[68233]: INFO nova.compute.manager [req-97b7f93b-3248-4008-a932-e90906681386 req-3f4a4531-3102-4dc1-967c-3d68e367224f service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Neutron deleted interface 69a128c9-1103-4a47-9adf-f0e87598c6c4; detaching it from the instance and deleting it from the info cache [ 712.050785] env[68233]: DEBUG nova.network.neutron [req-97b7f93b-3248-4008-a932-e90906681386 req-3f4a4531-3102-4dc1-967c-3d68e367224f service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.067223] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782058, 'name': Rename_Task, 'duration_secs': 0.461794} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.067492] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.067742] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c798e206-21d8-46ef-b625-fe35798a9202 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.075349] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 712.075349] env[68233]: value = "task-2782059" [ 712.075349] env[68233]: _type = "Task" [ 712.075349] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.083412] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782059, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.103335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.284s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.103797] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 712.106383] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 30.388s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.244834] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.245193] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Instance network_info: |[{"id": "fd3d32d5-4e84-47da-9098-50381834dfef", "address": "fa:16:3e:49:f8:1f", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd3d32d5-4e", "ovs_interfaceid": "fd3d32d5-4e84-47da-9098-50381834dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 712.245616] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:f8:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd3d32d5-4e84-47da-9098-50381834dfef', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.253407] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.253633] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.253854] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c336066-fb0f-4cdc-88f2-1a0ee001f773 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.273594] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.273594] env[68233]: value = "task-2782060" [ 712.273594] env[68233]: _type = "Task" [ 712.273594] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.281062] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782060, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.332677] env[68233]: DEBUG nova.compute.manager [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Received event network-changed-fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 712.332933] env[68233]: DEBUG nova.compute.manager [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Refreshing instance network info cache due to event network-changed-fd3d32d5-4e84-47da-9098-50381834dfef. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 712.333413] env[68233]: DEBUG oslo_concurrency.lockutils [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] Acquiring lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.333573] env[68233]: DEBUG oslo_concurrency.lockutils [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] Acquired lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.333743] env[68233]: DEBUG nova.network.neutron [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Refreshing network info cache for port fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.448853] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782057, 'name': ReconfigVM_Task, 'duration_secs': 0.780603} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.449187] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Reconfigured VM instance instance-00000021 to attach disk [datastore2] b056fbf4-4873-4ec9-905a-ad973c8fb27a/b056fbf4-4873-4ec9-905a-ad973c8fb27a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.449843] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5dea50a2-38ba-40b7-a8f5-0d62baa4e2e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.456084] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 712.456084] env[68233]: value = "task-2782061" [ 712.456084] env[68233]: _type = "Task" [ 712.456084] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.465718] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782061, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.503665] env[68233]: INFO nova.compute.manager [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Took 46.39 seconds to build instance. 
[ 712.532698] env[68233]: DEBUG nova.network.neutron [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.556198] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b61150b-4580-4471-b838-0ef011cbda51 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.566443] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae75024c-24de-46fe-8d9b-4de3fb6160bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.594069] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782059, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.623159] env[68233]: DEBUG nova.compute.utils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 712.625869] env[68233]: DEBUG nova.compute.manager [req-97b7f93b-3248-4008-a932-e90906681386 req-3f4a4531-3102-4dc1-967c-3d68e367224f service nova] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Detach interface failed, port_id=69a128c9-1103-4a47-9adf-f0e87598c6c4, reason: Instance 769956c6-7824-41db-9779-fc1b5f53dd94 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 712.626760] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 712.626930] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 712.676697] env[68233]: DEBUG nova.policy [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37e40c4399e3484cb879f0b64e3b90e3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b675b80ce4ea4ef5a8e9fb43cf2385f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 712.787454] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782060, 'name': CreateVM_Task, 'duration_secs': 0.451887} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.788159] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 712.788880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.789092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.789416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 712.789683] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73328d5c-f72a-4f23-bac0-4db911588bb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.797026] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 712.797026] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243eb14-fc3f-db76-73e0-55972e6739e2" [ 712.797026] env[68233]: _type = "Task" [ 712.797026] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.808417] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243eb14-fc3f-db76-73e0-55972e6739e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.968949] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782061, 'name': Rename_Task, 'duration_secs': 0.148681} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.969493] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 712.969853] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f11fec8-8bcf-4903-a3a2-0441c9a88487 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.978768] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 712.978768] env[68233]: value = "task-2782062" [ 712.978768] env[68233]: _type = "Task" [ 712.978768] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.988517] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.998758] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Successfully updated port: 5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.006041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bf874d7-08ee-47a5-b4bf-114203b202b6 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.030s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.035474] env[68233]: INFO nova.compute.manager [-] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Took 1.28 seconds to deallocate network for instance. [ 713.092596] env[68233]: DEBUG oslo_vmware.api [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782059, 'name': PowerOnVM_Task, 'duration_secs': 0.755958} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.092596] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.092596] env[68233]: INFO nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Took 13.79 seconds to spawn the instance on the hypervisor. [ 713.092596] env[68233]: DEBUG nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.092596] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Successfully created port: 6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 713.094705] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c6de01-5541-4ae0-bad5-c02f0199208e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.130419] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 713.159046] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.159212] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 35cbc15b-48d8-4acd-a957-eec3421df1ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.159340] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.159462] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 87385201-3118-4a8e-9739-db3b431566c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.159577] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.159726] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance a340c66c-74eb-43e5-8e72-54d9c8b07a26 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 713.161337] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance ba4ad2f8-fad1-45be-b2b1-68c3a58f3750 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.161808] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2812bf7c-5117-4fd9-9330-0cc94277bf5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.161958] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c6a358b7-0e6a-43bb-a171-5e6175f947bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.162215] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 75f58a50-7891-42df-8820-c997300a3159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.162215] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.162738] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 6ceb7d2d-143a-464a-aca5-6b6838630bb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.162834] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 769956c6-7824-41db-9779-fc1b5f53dd94 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.163146] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 6ae76b0f-7df2-4652-b4c3-92c16ed487a1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 713.163146] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 0f7d80d2-5c34-42f7-a14a-97f9625675a8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 713.163246] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 88d67405-b8c6-484a-b178-68a8babb3708 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 713.163365] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 636b6b36-3ab5-4851-a232-d27b54895595 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 713.163477] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3c9b701e-6461-45e3-8654-3291c5a487b9 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.163589] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 080ab438-269b-427a-9ee9-71c59d9c2a91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.164201] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f2af60e6-496c-4edb-9e99-4b45fa94bfeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.164201] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance b056fbf4-4873-4ec9-905a-ad973c8fb27a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.164306] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 5ed44950-8e9b-4f42-9611-d5bff01dc905 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.164381] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.164473] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance a5468df9-c54d-4014-8002-ef82f111a7a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 713.169031] env[68233]: DEBUG nova.network.neutron [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Updated VIF entry in instance network info cache for port fd3d32d5-4e84-47da-9098-50381834dfef. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.169031] env[68233]: DEBUG nova.network.neutron [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Updating instance_info_cache with network_info: [{"id": "fd3d32d5-4e84-47da-9098-50381834dfef", "address": "fa:16:3e:49:f8:1f", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd3d32d5-4e", "ovs_interfaceid": "fd3d32d5-4e84-47da-9098-50381834dfef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.310260] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243eb14-fc3f-db76-73e0-55972e6739e2, 'name': SearchDatastore_Task, 'duration_secs': 0.009695} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.310719] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.310823] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 713.311096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.311243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.311430] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.311829] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4808ada-9396-45b2-a501-e3bf497416ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.321596] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.321772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 713.322552] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f8faa1-8313-4313-bb3b-fa37282a6e20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.328458] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 713.328458] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0ef37-9180-7d11-b026-0760b6ff1f01" [ 713.328458] env[68233]: _type = "Task" [ 713.328458] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.337226] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0ef37-9180-7d11-b026-0760b6ff1f01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.498621] env[68233]: DEBUG oslo_vmware.api [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782062, 'name': PowerOnVM_Task, 'duration_secs': 0.456847} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.499093] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 713.499528] env[68233]: INFO nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Took 6.87 seconds to spawn the instance on the hypervisor. 
[ 713.499672] env[68233]: DEBUG nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 713.502184] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d7a605-2d34-4f55-bde1-56ce920edb2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.504715] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.504903] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquired lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.506097] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 713.508580] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.544022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.614821] env[68233]: INFO nova.compute.manager [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Took 55.45 seconds to build instance. [ 713.671424] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f7a1bfc5-7141-4764-b3fe-08d06020209a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 713.674009] env[68233]: DEBUG oslo_concurrency.lockutils [req-c43c03d9-743e-4f25-991e-875a13ccc9fe req-9bb2265e-7c99-459f-9cf3-ef4eef2cacd5 service nova] Releasing lock "refresh_cache-5ed44950-8e9b-4f42-9611-d5bff01dc905" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.840619] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0ef37-9180-7d11-b026-0760b6ff1f01, 'name': SearchDatastore_Task, 'duration_secs': 0.009289} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.841448] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47902ea3-5cb8-4e7e-977c-96d8b8a31a8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.847024] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 713.847024] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523114a4-557a-2fe6-c9df-7c8c95f5651b" [ 713.847024] env[68233]: _type = "Task" [ 713.847024] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.855342] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523114a4-557a-2fe6-c9df-7c8c95f5651b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.032868] env[68233]: INFO nova.compute.manager [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Took 44.70 seconds to build instance. [ 714.044443] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.046804] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 714.117416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9d4239ba-6bc5-4d79-9823-11f110e9e5f4 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.373s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.141494] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 714.167996] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 714.168277] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 714.168437] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 714.168622] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 714.168763] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 714.168938] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 714.169195] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 714.169362] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 714.169528] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 714.169688] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 714.169858] env[68233]: DEBUG nova.virt.hardware [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 714.170815] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c354f64f-189d-444f-89d3-323b5beacfa7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.176153] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 86528c8b-b51e-480d-a7bf-013d990d51ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 714.182880] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54357687-0eb5-41eb-a639-39be96a1381a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.198265] env[68233]: DEBUG nova.network.neutron [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updating instance_info_cache with network_info: [{"id": "5602cbb3-fef2-4353-917c-04002ea9ac31", "address": "fa:16:3e:3c:ba:20", "network": {"id": "7f46dcfc-7130-4b07-bda6-c83fad5f711b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-927919257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e86ceada21f1436ea2cddc9be5a41864", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5602cbb3-fe", "ovs_interfaceid": "5602cbb3-fef2-4353-917c-04002ea9ac31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.356795] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523114a4-557a-2fe6-c9df-7c8c95f5651b, 'name': SearchDatastore_Task, 'duration_secs': 0.008683} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.357071] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.357335] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5ed44950-8e9b-4f42-9611-d5bff01dc905/5ed44950-8e9b-4f42-9611-d5bff01dc905.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.357587] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e0bb992-46bf-4c4e-84cd-f7922ece46d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.364360] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 714.364360] env[68233]: value = "task-2782063" [ 714.364360] env[68233]: _type = "Task" [ 714.364360] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.373774] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782063, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.466530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.466829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.467061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.467256] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.467519] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.472713] env[68233]: INFO nova.compute.manager [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Terminating instance [ 714.537074] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 714.537632] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing instance network info cache due to event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 714.542040] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquiring lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.542040] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquired lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.542040] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.542040] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cae96251-6156-433a-abc9-40fca854cf7d tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.428s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.621022] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 714.633933] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Successfully updated port: 6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.659804] env[68233]: DEBUG nova.compute.manager [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Received event network-vif-plugged-6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 714.660497] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] Acquiring lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.660497] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.660681] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.661257] env[68233]: DEBUG nova.compute.manager [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] No waiting events found dispatching network-vif-plugged-6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 714.661342] env[68233]: WARNING nova.compute.manager [req-ccdfebed-acad-4ec4-ad93-c86de95ecad8 req-a7f065db-62d6-4b59-904e-2226db4b94e7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Received unexpected event network-vif-plugged-6a95e672-349a-487c-b8cb-da7a75d3ec95 for instance with vm_state building and task_state spawning. [ 714.688339] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 714.701517] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Releasing lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.701987] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Instance network_info: |[{"id": "5602cbb3-fef2-4353-917c-04002ea9ac31", "address": "fa:16:3e:3c:ba:20", "network": {"id": "7f46dcfc-7130-4b07-bda6-c83fad5f711b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-927919257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e86ceada21f1436ea2cddc9be5a41864", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5602cbb3-fe", "ovs_interfaceid": "5602cbb3-fef2-4353-917c-04002ea9ac31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 714.702754] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:ba:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52358fcc-0d9f-45dd-8c75-db533fd992c3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5602cbb3-fef2-4353-917c-04002ea9ac31', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.711031] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Creating folder: Project (e86ceada21f1436ea2cddc9be5a41864). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 714.713677] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37b2e31b-27c4-4b26-a9c7-8d161ec5c043 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.729232] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Created folder: Project (e86ceada21f1436ea2cddc9be5a41864) in parent group-v559223. [ 714.729465] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Creating folder: Instances. Parent ref: group-v559327. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 714.729725] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c34c6eec-203d-4f3a-ad22-f9a4f14f8962 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.740170] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Created folder: Instances in parent group-v559327. [ 714.740468] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.740676] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 714.740893] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-93ee0d25-9bf1-4ee4-a58e-c4c03ad3b74e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.763983] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.763983] env[68233]: value = "task-2782066" [ 714.763983] env[68233]: _type = "Task" [ 714.763983] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.772379] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782066, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.881140] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453813} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.881140] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5ed44950-8e9b-4f42-9611-d5bff01dc905/5ed44950-8e9b-4f42-9611-d5bff01dc905.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 714.881140] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 714.881140] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d407d38d-fd45-4695-9e36-35fd97d94299 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.891648] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 714.891648] env[68233]: value = "task-2782067" [ 714.891648] env[68233]: _type = "Task" [ 714.891648] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.901396] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.980789] env[68233]: DEBUG nova.compute.manager [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 714.981124] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 714.982213] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72731d81-ddf5-497e-902a-af0a1bf15b75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.992313] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 714.992782] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3ebf595-9c7b-4856-8a4e-ce57ed1c840d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.999241] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 714.999241] env[68233]: value = "task-2782068" [ 714.999241] env[68233]: _type = "Task" [ 714.999241] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.008361] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782068, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.046022] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 715.137360] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.137571] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquired lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.137745] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 715.154165] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.192313] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2a88648c-f00d-4d7b-905d-e70c327e248a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 715.266252] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updated VIF entry in instance network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 715.266385] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updating instance_info_cache with network_info: [{"id": "84572635-d33f-44cd-8a87-a9af1019bf50", "address": "fa:16:3e:e6:c3:88", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84572635-d3", "ovs_interfaceid": "84572635-d33f-44cd-8a87-a9af1019bf50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.282252] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782066, 'name': CreateVM_Task, 'duration_secs': 0.485214} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.282451] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.283158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.283321] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.283637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.284423] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-182adc7c-c8aa-4fee-84f4-052c0417ae5c {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.289939] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 715.289939] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52be354f-2adc-810f-0e58-9aee55813483" [ 715.289939] env[68233]: _type = "Task" [ 715.289939] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.298100] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52be354f-2adc-810f-0e58-9aee55813483, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.312144] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.312475] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.312711] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.312889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.313074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.315274] env[68233]: INFO nova.compute.manager [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 
tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Terminating instance [ 715.403415] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074122} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.403415] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.404219] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d802fef0-060b-45f9-9139-eafd5b5f11b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.427459] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] 5ed44950-8e9b-4f42-9611-d5bff01dc905/5ed44950-8e9b-4f42-9611-d5bff01dc905.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.427767] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b7b5d29-36a6-4062-9caa-0c8d3c5a8388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.449750] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 715.449750] env[68233]: value = "task-2782069" [ 715.449750] env[68233]: _type = "Task" [ 715.449750] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.459051] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.508725] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782068, 'name': PowerOffVM_Task, 'duration_secs': 0.278167} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.508995] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 715.509186] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 715.509470] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-678fffea-d83b-41be-8528-9b8e89a463e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.567407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.678564] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.698709] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c8fd5539-8add-45fe-a0ac-8767bf8a330e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 715.728267] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 715.728267] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 715.728267] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleting the datastore file [datastore2] 080ab438-269b-427a-9ee9-71c59d9c2a91 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 715.728267] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-756f3977-0e1c-41d4-bca0-d453b486ed12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.735174] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 715.735174] env[68233]: value = "task-2782071" [ 715.735174] env[68233]: _type = "Task" [ 715.735174] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.743518] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782071, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.775818] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Releasing lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.776192] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Received event network-vif-plugged-5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 715.776420] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquiring lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.776658] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.776837] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 715.777014] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] No waiting events found dispatching network-vif-plugged-5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 715.777185] env[68233]: WARNING nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Received unexpected event network-vif-plugged-5602cbb3-fef2-4353-917c-04002ea9ac31 for instance with vm_state building and task_state spawning. [ 715.777345] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Received event network-changed-5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 715.778033] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Refreshing instance network info cache due to event network-changed-5602cbb3-fef2-4353-917c-04002ea9ac31. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 715.778033] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquiring lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.778033] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquired lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.778033] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Refreshing network info cache for port 5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 715.801243] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52be354f-2adc-810f-0e58-9aee55813483, 'name': SearchDatastore_Task, 'duration_secs': 0.06813} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.804614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.804895] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.805187] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.805354] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.805539] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 715.806114] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3184daf-9f1a-4e89-9b54-ab02d0e549e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.814755] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 715.814948] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 715.815688] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c54556cc-ec30-430c-bf4c-1e88ab4fafd1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.820238] env[68233]: DEBUG nova.compute.manager [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 715.820476] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.821318] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5f69f1-b7e4-4215-bc6f-90f67fd6d77a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.832368] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 715.832723] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 715.832723] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5279ae50-65cb-a926-d695-e3fe19d2153b" [ 715.832723] env[68233]: _type = "Task" [ 715.832723] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.832947] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6f30dbb-284a-44c7-aa17-be536c7d8e70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.845771] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5279ae50-65cb-a926-d695-e3fe19d2153b, 'name': SearchDatastore_Task, 'duration_secs': 0.00993} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.846490] env[68233]: DEBUG nova.network.neutron [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Updating instance_info_cache with network_info: [{"id": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "address": "fa:16:3e:07:28:23", "network": {"id": "b6abb4c1-9800-47e1-b56e-1b12eb931899", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1112117547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b675b80ce4ea4ef5a8e9fb43cf2385f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a95e672-34", "ovs_interfaceid": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.850769] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 715.850769] env[68233]: value = "task-2782072" [ 715.850769] env[68233]: _type = "Task" [ 715.850769] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.850769] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3af53e73-33b8-4019-8ba8-4c8f52be6273 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.860127] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 715.860127] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5231c266-3ced-1bcf-0def-39fe53331b1d" [ 715.860127] env[68233]: _type = "Task" [ 715.860127] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.863980] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782072, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.875956] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5231c266-3ced-1bcf-0def-39fe53331b1d, 'name': SearchDatastore_Task, 'duration_secs': 0.010722} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.877019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.877019] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb/9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 715.877019] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db74be54-1ff5-4921-8b3f-ea681c2b9def {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.884724] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 715.884724] env[68233]: value = "task-2782073" [ 715.884724] env[68233]: _type = "Task" [ 715.884724] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.893984] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.961189] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782069, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.203703] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 876d428d-d5c9-422a-aba2-2d6c61b092db has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 716.244921] env[68233]: DEBUG oslo_vmware.api [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163604} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.245226] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 716.246472] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 716.246472] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 716.246472] env[68233]: INFO nova.compute.manager [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Took 1.26 seconds to destroy the instance on the hypervisor. [ 716.246472] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.246472] env[68233]: DEBUG nova.compute.manager [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 716.246472] env[68233]: DEBUG nova.network.neutron [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 716.351671] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Releasing lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.352198] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Instance network_info: |[{"id": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "address": "fa:16:3e:07:28:23", "network": {"id": "b6abb4c1-9800-47e1-b56e-1b12eb931899", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1112117547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b675b80ce4ea4ef5a8e9fb43cf2385f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a95e672-34", "ovs_interfaceid": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 716.353218] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:28:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a95e672-349a-487c-b8cb-da7a75d3ec95', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.368027] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Creating folder: Project (b675b80ce4ea4ef5a8e9fb43cf2385f1). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.368027] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1bfdfbc-d729-4168-9d67-9da21becb18e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.386809] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782072, 'name': PowerOffVM_Task, 'duration_secs': 0.313257} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.391545] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 716.391778] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 716.392148] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Created folder: Project (b675b80ce4ea4ef5a8e9fb43cf2385f1) in parent group-v559223. [ 716.392364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Creating folder: Instances. Parent ref: group-v559330. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 716.395606] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05b1c26b-5104-4a1a-9a38-df454fd05699 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.397494] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4d318f7-a594-48df-a847-a42788923125 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.407515] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514901} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.407844] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb/9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.408133] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.409864] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c40d546f-5089-4e66-86e2-9ba3c4ef2764 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.412391] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Created folder: Instances in parent group-v559330. [ 716.412710] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.412953] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.413636] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-013d9446-a356-42b6-8d11-1ded46960168 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.440308] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 716.440308] env[68233]: value = "task-2782077" [ 716.440308] env[68233]: _type = "Task" [ 716.440308] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.446667] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.446667] env[68233]: value = "task-2782078" [ 716.446667] env[68233]: _type = "Task" [ 716.446667] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.458252] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782077, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.468928] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782078, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.473238] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782069, 'name': ReconfigVM_Task, 'duration_secs': 0.725955} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.473578] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Reconfigured VM instance instance-00000023 to attach disk [datastore2] 5ed44950-8e9b-4f42-9611-d5bff01dc905/5ed44950-8e9b-4f42-9611-d5bff01dc905.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 716.474387] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9267a608-af02-4256-9c76-604848c78c08 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.478195] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 716.478449] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 716.478686] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Deleting the datastore file [datastore2] b056fbf4-4873-4ec9-905a-ad973c8fb27a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 716.478971] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12e9ec33-c304-46fe-9f24-3b59982f62c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.484408] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 716.484408] env[68233]: value = "task-2782079" [ 716.484408] env[68233]: _type = "Task" [ 716.484408] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.486059] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for the task: (returnval){ [ 716.486059] env[68233]: value = "task-2782080" [ 716.486059] env[68233]: _type = "Task" [ 716.486059] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.506611] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782079, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.506853] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782080, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.640222] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updated VIF entry in instance network info cache for port 5602cbb3-fef2-4353-917c-04002ea9ac31. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 716.640630] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updating instance_info_cache with network_info: [{"id": "5602cbb3-fef2-4353-917c-04002ea9ac31", "address": "fa:16:3e:3c:ba:20", "network": {"id": "7f46dcfc-7130-4b07-bda6-c83fad5f711b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-927919257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e86ceada21f1436ea2cddc9be5a41864", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5602cbb3-fe", "ovs_interfaceid": "5602cbb3-fef2-4353-917c-04002ea9ac31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.707386] env[68233]: DEBUG nova.compute.manager [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Received event network-changed-6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 
716.707642] env[68233]: DEBUG nova.compute.manager [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Refreshing instance network info cache due to event network-changed-6a95e672-349a-487c-b8cb-da7a75d3ec95. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 716.707867] env[68233]: DEBUG oslo_concurrency.lockutils [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] Acquiring lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.708151] env[68233]: DEBUG oslo_concurrency.lockutils [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] Acquired lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.708232] env[68233]: DEBUG nova.network.neutron [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Refreshing network info cache for port 6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.710834] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dcd8cca2-b62c-44a6-9e77-f336d2d39c09 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 716.954040] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071081} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.954413] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 716.955155] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0deaf695-769e-4af4-aeff-3a34bfbe20dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.960178] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782078, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.989942] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb/9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 716.989942] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a138aa9b-d289-480a-84db-8d660c82c17e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.020599] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782079, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.025422] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 717.025422] env[68233]: value = "task-2782081" [ 717.025422] env[68233]: _type = "Task" [ 717.025422] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.025677] env[68233]: DEBUG oslo_vmware.api [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Task: {'id': task-2782080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127099} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.025998] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 717.026232] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 717.026616] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 717.026915] env[68233]: INFO nova.compute.manager [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Took 1.21 seconds to destroy the instance on the hypervisor. 
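
The PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, Rename_Task and DeleteDatastoreFile_Task entries above all follow oslo.vmware's invoke-then-wait pattern: a *_Task method is invoked over the vSphere API, then wait_for_task polls it, which is what produces the "_poll_task ... progress" and "completed successfully" lines from oslo_vmware/api.py. A minimal sketch of that pattern follows; it assumes oslo.vmware's public VMwareAPISession interface, and the host, credentials, retry/poll values and helper names are illustrative placeholders, not this deployment's configuration or the Nova driver code itself.

from oslo_vmware import api


def connect(host, username, password):
    # Open an authenticated vSphere API session. The retry count and poll
    # interval are illustrative values; the keyword names assume a recent
    # oslo.vmware release.
    return api.VMwareAPISession(
        host=host,
        server_username=username,
        server_password=password,
        api_retry_count=10,
        task_poll_interval=0.5)


def power_off_vm(session, vm_ref):
    # invoke_api sends the SOAP request (the "Invoking
    # VirtualMachine.PowerOffVM_Task" entries); wait_for_task then polls the
    # returned task until vCenter reports it finished, raising if the task
    # ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)

Only the invoked method name and its arguments change for the other task types polled in this section; vm_ref here stands for a VirtualMachine managed object reference obtained separately.
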
[ 717.027200] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 717.030511] env[68233]: DEBUG nova.compute.manager [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 717.032044] env[68233]: DEBUG nova.network.neutron [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 717.038660] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.143737] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Releasing lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.144086] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 717.144304] env[68233]: DEBUG nova.compute.manager [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing instance network info cache due to event network-changed-84572635-d33f-44cd-8a87-a9af1019bf50. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 717.144534] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquiring lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.144675] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Acquired lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.144835] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Refreshing network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 717.214551] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance abdf9de2-8563-4a31-91a3-0c18b0387533 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 717.456906] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782078, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.494423] env[68233]: DEBUG nova.network.neutron [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Updated VIF entry in instance network info cache for port 6a95e672-349a-487c-b8cb-da7a75d3ec95. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.494779] env[68233]: DEBUG nova.network.neutron [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Updating instance_info_cache with network_info: [{"id": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "address": "fa:16:3e:07:28:23", "network": {"id": "b6abb4c1-9800-47e1-b56e-1b12eb931899", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1112117547-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b675b80ce4ea4ef5a8e9fb43cf2385f1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a95e672-34", "ovs_interfaceid": "6a95e672-349a-487c-b8cb-da7a75d3ec95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.499161] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782079, 'name': Rename_Task, 'duration_secs': 0.867331} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.499633] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 717.499872] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62b1073c-db9b-42e4-8bc4-f2c600d703fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.506526] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 717.506526] env[68233]: value = "task-2782082" [ 717.506526] env[68233]: _type = "Task" [ 717.506526] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.515249] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.535085] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782081, 'name': ReconfigVM_Task, 'duration_secs': 0.289443} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.535313] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb/9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 717.535939] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86d15adf-c5c2-4e60-885a-55fd4d5eb0d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.541640] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 717.541640] env[68233]: value = "task-2782083" [ 717.541640] env[68233]: _type = "Task" [ 717.541640] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.549924] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782083, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.624293] env[68233]: DEBUG nova.network.neutron [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.718338] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 717.856396] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updated VIF entry in instance network info cache for port 84572635-d33f-44cd-8a87-a9af1019bf50. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.856396] env[68233]: DEBUG nova.network.neutron [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updating instance_info_cache with network_info: [{"id": "84572635-d33f-44cd-8a87-a9af1019bf50", "address": "fa:16:3e:e6:c3:88", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84572635-d3", "ovs_interfaceid": "84572635-d33f-44cd-8a87-a9af1019bf50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.865361] env[68233]: DEBUG nova.network.neutron [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.958175] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782078, 'name': CreateVM_Task, 'duration_secs': 1.218552} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.959027] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 717.959027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.959175] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.959484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 717.959743] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dcb59fa-9ec8-43e3-9095-3779de1fec9a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.965015] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 717.965015] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52841165-b26b-1554-74a6-441c9a056a50" [ 717.965015] env[68233]: _type = "Task" [ 717.965015] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.974360] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52841165-b26b-1554-74a6-441c9a056a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.000267] env[68233]: DEBUG oslo_concurrency.lockutils [req-242cce5d-906c-4c8c-b1a9-ed6ca979d960 req-9af14c95-3483-45ba-a38f-ac105aae1c22 service nova] Releasing lock "refresh_cache-a5468df9-c54d-4014-8002-ef82f111a7a4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.017767] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782082, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.050571] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782083, 'name': Rename_Task, 'duration_secs': 0.186221} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.050843] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 718.051100] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae712510-725a-4108-80c1-4cd67c22ff60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.057873] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 718.057873] env[68233]: value = "task-2782084" [ 718.057873] env[68233]: _type = "Task" [ 718.057873] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.065779] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782084, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.125673] env[68233]: INFO nova.compute.manager [-] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Took 1.88 seconds to deallocate network for instance. [ 718.223572] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 13972b73-8bae-4a2a-a987-b6177381e7c8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 718.359255] env[68233]: DEBUG oslo_concurrency.lockutils [req-b365516c-f81f-45a5-9c17-bb785ed41c4d req-21bfb837-1861-4e25-86fe-43a2ee2d36c2 service nova] Releasing lock "refresh_cache-f2af60e6-496c-4edb-9e99-4b45fa94bfeb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.368590] env[68233]: INFO nova.compute.manager [-] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Took 1.34 seconds to deallocate network for instance. [ 718.475413] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52841165-b26b-1554-74a6-441c9a056a50, 'name': SearchDatastore_Task, 'duration_secs': 0.009541} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.475717] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.475930] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 718.476170] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.476316] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.476491] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 718.476743] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-77bb98a9-2c56-4595-8903-5ea157ecf286 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.485378] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 718.485554] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 718.486331] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b5a660f-72b2-4e86-b5e1-970a32a83dbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.491706] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 718.491706] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a487cd-cb36-da6d-b413-8a6d3b07376d" [ 718.491706] env[68233]: _type = "Task" [ 718.491706] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.498996] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a487cd-cb36-da6d-b413-8a6d3b07376d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.516373] env[68233]: DEBUG oslo_vmware.api [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782082, 'name': PowerOnVM_Task, 'duration_secs': 0.668597} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.516616] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 718.516815] env[68233]: INFO nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Took 9.32 seconds to spawn the instance on the hypervisor. [ 718.516993] env[68233]: DEBUG nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.517746] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-039439b0-9494-4704-9b87-ccb88ba355f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.568323] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782084, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.633664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.690656] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 718.690882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.726353] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 72467d49-6fa8-42db-871e-4e50e77eedf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 718.735558] env[68233]: DEBUG nova.compute.manager [req-e1f9f1d7-df02-4ffd-89db-ecdce87e7b61 req-1af49d56-58a3-4c83-80ac-b4e716a65c63 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-deleted-c8ec87e9-21b0-43c3-b4f2-028b6a94ef8e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 718.735792] env[68233]: DEBUG nova.compute.manager [req-e1f9f1d7-df02-4ffd-89db-ecdce87e7b61 req-1af49d56-58a3-4c83-80ac-b4e716a65c63 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-deleted-554b0c26-9841-4611-927e-bc01c9633734 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 718.736030] env[68233]: DEBUG nova.compute.manager [req-e1f9f1d7-df02-4ffd-89db-ecdce87e7b61 req-1af49d56-58a3-4c83-80ac-b4e716a65c63 service nova] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Received event network-vif-deleted-fed32956-586c-44c1-adff-5d2b750f410c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 718.736178] env[68233]: DEBUG nova.compute.manager [req-e1f9f1d7-df02-4ffd-89db-ecdce87e7b61 req-1af49d56-58a3-4c83-80ac-b4e716a65c63 service nova] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Received event network-vif-deleted-9a9fa3f8-4a3f-4dbb-8298-dd1497f7e673 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 718.877025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.003367] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a487cd-cb36-da6d-b413-8a6d3b07376d, 'name': SearchDatastore_Task, 'duration_secs': 0.008776} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.004243] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-143ca5c0-ff72-4fc2-aa9f-153a9522ed8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.009579] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 719.009579] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ff753-dad6-74a5-238f-46e70505207c" [ 719.009579] env[68233]: _type = "Task" [ 719.009579] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.018956] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ff753-dad6-74a5-238f-46e70505207c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.032126] env[68233]: INFO nova.compute.manager [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Took 43.86 seconds to build instance. [ 719.069035] env[68233]: DEBUG oslo_vmware.api [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782084, 'name': PowerOnVM_Task, 'duration_secs': 0.547744} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.069035] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 719.069035] env[68233]: INFO nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Took 7.21 seconds to spawn the instance on the hypervisor. [ 719.069035] env[68233]: DEBUG nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.069420] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef28d5a9-8791-4d54-b8bf-efe3b7f41018 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.229599] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 990e1a66-f2ab-4925-b1da-58cdc41a6315 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 719.229773] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 719.229899] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4224MB phys_disk=200GB used_disk=18GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 719.524740] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ff753-dad6-74a5-238f-46e70505207c, 'name': SearchDatastore_Task, 'duration_secs': 0.009621} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.529026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 719.529026] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a5468df9-c54d-4014-8002-ef82f111a7a4/a5468df9-c54d-4014-8002-ef82f111a7a4.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 719.529026] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29a87fb1-ea91-42f7-a319-4719e41098c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.533414] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f97ee21a-bf42-4cd5-8be0-4600034ce210 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.225s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.536010] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 719.536010] env[68233]: value = "task-2782085" [ 719.536010] env[68233]: _type = "Task" [ 719.536010] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.547309] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.598065] env[68233]: INFO nova.compute.manager [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Took 42.51 seconds to build instance. [ 719.778414] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f4a9f8-8fac-42ec-bfae-7c4a1ea7e7fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.789423] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52c7fa7-3fb5-4616-8bea-872d1fdbc567 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.826391] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eea7f3c-2f05-4b71-9c5a-f83057b3be8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.834309] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf4047c-0b98-4391-9bfe-6e43bcc9dbab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.849477] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.037809] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 720.051044] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782085, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503515} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.051044] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a5468df9-c54d-4014-8002-ef82f111a7a4/a5468df9-c54d-4014-8002-ef82f111a7a4.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 720.051044] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 720.051044] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb974cb6-0abc-4951-afd2-1458099350ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.057088] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 720.057088] env[68233]: value = "task-2782086" [ 720.057088] env[68233]: _type = "Task" [ 720.057088] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.064293] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782086, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.100234] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e7b1477f-53fe-4775-89fd-c6080a7fd47e tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.753s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.277945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.277945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.277945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.277945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.277945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.280037] env[68233]: INFO nova.compute.manager [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Terminating instance [ 720.353214] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.574521] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782086, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065509} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.575553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.575854] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 720.576695] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363e5fa5-fe1e-4694-b1c9-a4a35a3c80c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.604434] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 720.622187] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] a5468df9-c54d-4014-8002-ef82f111a7a4/a5468df9-c54d-4014-8002-ef82f111a7a4.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 720.623654] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cdd00a1-775f-4efa-bb43-1aec315bb3fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.652029] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 720.652029] env[68233]: value = "task-2782087" [ 720.652029] env[68233]: _type = "Task" [ 720.652029] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.664435] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782087, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.784966] env[68233]: DEBUG nova.compute.manager [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 720.786274] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 720.786274] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf26d28c-85d1-4fa8-b4b6-62cb9bad2059 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.794569] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.794569] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3c4a6b70-37ab-42f4-9946-69113ace2435 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.802257] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 720.802257] env[68233]: value = "task-2782088" [ 720.802257] env[68233]: _type = "Task" [ 720.802257] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.808572] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782088, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.823549] env[68233]: DEBUG nova.compute.manager [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Received event network-changed-5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 720.823757] env[68233]: DEBUG nova.compute.manager [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Refreshing instance network info cache due to event network-changed-5602cbb3-fef2-4353-917c-04002ea9ac31. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 720.823971] env[68233]: DEBUG oslo_concurrency.lockutils [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] Acquiring lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.824410] env[68233]: DEBUG oslo_concurrency.lockutils [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] Acquired lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.824609] env[68233]: DEBUG nova.network.neutron [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Refreshing network info cache for port 5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 720.857380] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 720.857628] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.751s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.857888] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.097s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.858081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.860348] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.541s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.860547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.862722] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.008s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.864210] env[68233]: INFO nova.compute.claims [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.894076] env[68233]: INFO nova.scheduler.client.report [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Deleted allocations for instance a340c66c-74eb-43e5-8e72-54d9c8b07a26 [ 720.898580] env[68233]: INFO nova.scheduler.client.report [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Deleted allocations for instance 6ae76b0f-7df2-4652-b4c3-92c16ed487a1 [ 721.146672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.165396] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782087, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.313219] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782088, 'name': PowerOffVM_Task, 'duration_secs': 0.39314} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.313219] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 721.313219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 721.313219] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33e71dce-6fa5-4beb-b210-f18fe55bfa5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.397157] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 721.397478] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 721.397694] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 5ed44950-8e9b-4f42-9611-d5bff01dc905 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 721.401533] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92dc185b-cd4c-426d-8190-fad0c7b16623 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.408293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae9d3e3a-7405-47dc-a264-24cadb93c145 tempest-ListServerFiltersTestJSON-1636021690 tempest-ListServerFiltersTestJSON-1636021690-project-member] Lock "a340c66c-74eb-43e5-8e72-54d9c8b07a26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.718s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.416241] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35b8a4db-3bf0-41a2-a4e5-1341c65ca036 tempest-ServerGroupTestJSON-1004789544 tempest-ServerGroupTestJSON-1004789544-project-member] Lock "6ae76b0f-7df2-4652-b4c3-92c16ed487a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.112s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.418526] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 721.418526] 
env[68233]: value = "task-2782090" [ 721.418526] env[68233]: _type = "Task" [ 721.418526] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.427735] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782090, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.568594] env[68233]: DEBUG nova.network.neutron [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updated VIF entry in instance network info cache for port 5602cbb3-fef2-4353-917c-04002ea9ac31. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 721.568949] env[68233]: DEBUG nova.network.neutron [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updating instance_info_cache with network_info: [{"id": "5602cbb3-fef2-4353-917c-04002ea9ac31", "address": "fa:16:3e:3c:ba:20", "network": {"id": "7f46dcfc-7130-4b07-bda6-c83fad5f711b", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-927919257-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.163", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e86ceada21f1436ea2cddc9be5a41864", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52358fcc-0d9f-45dd-8c75-db533fd992c3", "external-id": "nsx-vlan-transportzone-77", "segmentation_id": 77, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5602cbb3-fe", "ovs_interfaceid": "5602cbb3-fef2-4353-917c-04002ea9ac31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.663087] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782087, 'name': ReconfigVM_Task, 'duration_secs': 0.571851} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.663419] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Reconfigured VM instance instance-00000025 to attach disk [datastore2] a5468df9-c54d-4014-8002-ef82f111a7a4/a5468df9-c54d-4014-8002-ef82f111a7a4.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 721.664062] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70d1337a-6514-48f9-b5df-9512bf96ff84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.670351] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 721.670351] env[68233]: value = "task-2782091" [ 721.670351] env[68233]: _type = "Task" [ 721.670351] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.681022] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782091, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.913581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.913831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.914046] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.914233] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.914403] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.916996] env[68233]: INFO nova.compute.manager [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Terminating instance [ 721.929186] env[68233]: DEBUG oslo_vmware.api [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168081} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.932116] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 721.932388] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 721.932600] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 721.932778] env[68233]: INFO nova.compute.manager [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Took 1.15 seconds to destroy the instance on the hypervisor. [ 721.933031] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 721.934276] env[68233]: DEBUG nova.compute.manager [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 721.934372] env[68233]: DEBUG nova.network.neutron [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 722.073284] env[68233]: DEBUG oslo_concurrency.lockutils [req-23de7642-e8a7-4f34-a284-45d6bd1dc1d1 req-1cde65c2-72ef-46b1-899f-cb8f4fb8cdd3 service nova] Releasing lock "refresh_cache-9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.180317] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782091, 'name': Rename_Task, 'duration_secs': 0.167937} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.182878] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 722.183342] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0bfa26c-2712-4018-9775-e74b7ebcca1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.190582] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 722.190582] env[68233]: value = "task-2782092" [ 722.190582] env[68233]: _type = "Task" [ 722.190582] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.201281] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782092, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.381574] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6ce3de-9b82-4761-92aa-9b72e64a94dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.390596] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029225d6-66ac-40b3-b351-6a443cd42bc1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.422455] env[68233]: DEBUG nova.compute.manager [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 722.422674] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 722.423645] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453cf25e-dde8-450b-a7d6-2605fa8701aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.426825] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cf55c0-bbb8-4122-b6b7-08624e9dbed1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.436371] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b2a1be-6984-4a40-bb6f-c7668b001da9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.440224] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 722.440458] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f158aa9-3790-45ef-a8ac-ca39c17f0d9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.453462] env[68233]: DEBUG nova.compute.provider_tree [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.456021] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 722.456021] env[68233]: value = 
"task-2782093" [ 722.456021] env[68233]: _type = "Task" [ 722.456021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.463812] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782093, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.627345] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 722.627345] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 722.703648] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782092, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.734867] env[68233]: DEBUG nova.network.neutron [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.957251] env[68233]: DEBUG nova.compute.manager [req-02a5d6ad-e403-4644-8a1a-1f365f93ac0f req-b5d0f3a2-bb1b-41f1-8e3a-57b3b7be1d25 service nova] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Received event network-vif-deleted-fd3d32d5-4e84-47da-9098-50381834dfef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 722.957251] env[68233]: DEBUG nova.scheduler.client.report [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 722.973399] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782093, 'name': PowerOffVM_Task, 'duration_secs': 0.194508} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.973685] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 722.973801] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 722.974086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fcb81ca-fcce-4245-a4ba-749a9621a9f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.039349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 723.039709] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 723.040331] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleting the datastore file [datastore2] 35cbc15b-48d8-4acd-a957-eec3421df1ce {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 723.040756] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5e2ca28-be01-4527-9d1a-c04357f6072c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.050550] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 723.050550] env[68233]: value = "task-2782095" [ 723.050550] env[68233]: _type = "Task" [ 723.050550] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.059862] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782095, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.203930] env[68233]: DEBUG oslo_vmware.api [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782092, 'name': PowerOnVM_Task, 'duration_secs': 0.557891} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.204284] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.204532] env[68233]: INFO nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Took 9.06 seconds to spawn the instance on the hypervisor. [ 723.205041] env[68233]: DEBUG nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.205884] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c884fc31-9e8c-4139-b8e5-118595922fbf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.237659] env[68233]: INFO nova.compute.manager [-] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Took 1.30 seconds to deallocate network for instance. [ 723.467498] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.468063] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 723.472194] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.028s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.474013] env[68233]: INFO nova.compute.claims [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.567913] env[68233]: DEBUG oslo_vmware.api [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140771} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.567913] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 723.567913] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 723.567913] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.567913] env[68233]: INFO nova.compute.manager [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Took 1.14 seconds to destroy the instance on the hypervisor. [ 723.567913] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 723.567913] env[68233]: DEBUG nova.compute.manager [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 723.567913] env[68233]: DEBUG nova.network.neutron [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 723.734967] env[68233]: INFO nova.compute.manager [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Took 43.43 seconds to build instance. [ 723.745939] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.978353] env[68233]: DEBUG nova.compute.utils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 723.983013] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 723.983013] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.246847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5b3cb8b0-4113-4314-a6e6-f2bf6c42db03 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.220s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.342159] env[68233]: DEBUG nova.policy [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc2c677532ef4ebfa5c8e7357c2d1732', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb0965969ed647038bea9f0388a2df05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 724.458633] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.459399] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.459658] env[68233]: DEBUG nova.compute.manager [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.461854] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c248c19-bde3-465d-9d03-fe80f7893c79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.471046] env[68233]: DEBUG nova.compute.manager [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM 
power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 724.471917] env[68233]: DEBUG nova.objects.instance [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'flavor' on Instance uuid 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 724.488231] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 724.700817] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "a5468df9-c54d-4014-8002-ef82f111a7a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.701082] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.701331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.701529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.701695] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.703725] env[68233]: INFO nova.compute.manager [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Terminating instance [ 724.752585] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 
tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 724.975255] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Successfully created port: 1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.045727] env[68233]: DEBUG nova.compute.manager [req-c812d511-36f7-4638-aa6e-6ffe56eda858 req-fb1bd00e-6563-481b-b03f-6eb9833131b4 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Received event network-vif-deleted-db2a881b-a7e3-40d6-9df5-f9280b97cfc9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 725.045727] env[68233]: INFO nova.compute.manager [req-c812d511-36f7-4638-aa6e-6ffe56eda858 req-fb1bd00e-6563-481b-b03f-6eb9833131b4 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Neutron deleted interface db2a881b-a7e3-40d6-9df5-f9280b97cfc9; detaching it from the instance and deleting it from the info cache [ 725.045727] env[68233]: DEBUG nova.network.neutron [req-c812d511-36f7-4638-aa6e-6ffe56eda858 req-fb1bd00e-6563-481b-b03f-6eb9833131b4 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.087572] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5bc4a6-9943-4941-85f5-16256fddcc06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.101899] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f0c9a5-71f0-4b0c-9168-71d46c13a414 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.138542] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34262e10-ed7c-4c19-a08f-28c3e33c4083 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.147300] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-708f6b57-78ea-4a90-93bb-b8798b599758 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.163778] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
725.208040] env[68233]: DEBUG nova.compute.manager [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 725.208626] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 725.209832] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7b81a1-32cc-43a1-a693-b09e98acc032 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.221160] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.222254] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9131279d-a486-4ff2-9f88-20c56fff9eee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.231512] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 725.231512] env[68233]: value = "task-2782096" [ 725.231512] env[68233]: _type = "Task" [ 725.231512] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.242117] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782096, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.289329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.364439] env[68233]: DEBUG nova.network.neutron [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.483020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 725.483137] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e9666868-bd90-4a5f-a4e2-f0f0b1d8d215 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.496999] env[68233]: DEBUG oslo_vmware.api [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 725.496999] env[68233]: value = "task-2782097" [ 725.496999] env[68233]: _type = "Task" [ 725.496999] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.506865] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 725.509504] env[68233]: DEBUG oslo_vmware.api [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782097, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.543940] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 725.544231] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 725.544420] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 725.544697] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 725.544955] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 725.545673] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 725.545673] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 725.545790] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 725.545968] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 725.546057] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 725.546309] env[68233]: DEBUG nova.virt.hardware [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 725.547069] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7577f62-681b-439f-a6dc-40bd7408b1a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.550536] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e367924c-d1e6-4007-92b7-40633c9e40b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.560211] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355a3002-199f-451b-b78b-35f5451ec44a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.567021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7cfaca-f292-4b53-b967-bf27c0b2362f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.603829] env[68233]: DEBUG nova.compute.manager [req-c812d511-36f7-4638-aa6e-6ffe56eda858 req-fb1bd00e-6563-481b-b03f-6eb9833131b4 service nova] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Detach interface failed, port_id=db2a881b-a7e3-40d6-9df5-f9280b97cfc9, reason: Instance 35cbc15b-48d8-4acd-a957-eec3421df1ce could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 725.689899] env[68233]: ERROR nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [req-27358f48-5cd2-4500-a3c2-52e223b1742e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-27358f48-5cd2-4500-a3c2-52e223b1742e"}]} [ 725.710949] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 725.728435] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 725.728842] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 725.742536] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782096, 'name': PowerOffVM_Task, 'duration_secs': 0.310418} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.743761] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 725.746208] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 725.746394] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 725.746862] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-959ebba6-c6a7-442b-bda7-36db694d5726 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.766700] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 725.872198] env[68233]: INFO nova.compute.manager [-] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Took 2.31 seconds to deallocate network for instance. 
[ 725.890233] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 725.890233] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 725.890233] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Deleting the datastore file [datastore2] a5468df9-c54d-4014-8002-ef82f111a7a4 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 725.890233] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9156a309-df24-4bd5-bade-d5184ba04d20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.896191] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for the task: (returnval){ [ 725.896191] env[68233]: value = "task-2782099" [ 725.896191] env[68233]: _type = "Task" [ 725.896191] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.905440] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.014047] env[68233]: DEBUG oslo_vmware.api [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782097, 'name': PowerOffVM_Task, 'duration_secs': 0.389673} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.014366] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 726.014574] env[68233]: DEBUG nova.compute.manager [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 726.015575] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6694ff4b-1367-4011-a2d4-833e73c3791f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.289989] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "175ced9c-52f6-4577-a010-8fffc2876e6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.289989] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.374661] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57083d65-2eb0-46d0-a9c9-849d135eddf3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.381413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.385053] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b87ead-23b1-47f0-bb2f-62daa4df4388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.421781] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cf0c1d-afeb-4221-89dd-3a4c30762a93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.430730] env[68233]: DEBUG oslo_vmware.api [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Task: {'id': task-2782099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143565} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.432992] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 726.433202] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 726.433376] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 726.433553] env[68233]: INFO nova.compute.manager [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Took 1.23 seconds to destroy the instance on the hypervisor. [ 726.433796] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 726.434084] env[68233]: DEBUG nova.compute.manager [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 726.434188] env[68233]: DEBUG nova.network.neutron [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 726.436847] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0b7486-0687-4d52-b103-db9c1b02d0ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.451469] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 726.533451] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bb85c491-83d5-4dde-88a3-5be4267e91bd tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.074s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.758292] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Successfully updated port: 1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 726.981011] env[68233]: ERROR nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [req-638ad08c-24d9-4527-94f2-646465ccd63f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-638ad08c-24d9-4527-94f2-646465ccd63f"}]} [ 727.007023] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 727.025757] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 727.025757] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.041247] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 727.075249] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 727.150466] env[68233]: DEBUG nova.objects.instance [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'flavor' on Instance uuid 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.167533] 
env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Received event network-vif-plugged-1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.167956] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Acquiring lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.167956] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.168571] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.168571] env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] No waiting events found dispatching network-vif-plugged-1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.168571] env[68233]: WARNING nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Received unexpected event network-vif-plugged-1078efb6-b35d-496e-aeb6-08489c2bfbea for instance with vm_state building and task_state spawning. [ 727.168571] env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Received event network-changed-1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 727.169491] env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Refreshing instance network info cache due to event network-changed-1078efb6-b35d-496e-aeb6-08489c2bfbea. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 727.169491] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Acquiring lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.169491] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Acquired lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.169491] env[68233]: DEBUG nova.network.neutron [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Refreshing network info cache for port 1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.261829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.356162] env[68233]: DEBUG nova.network.neutron [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.659088] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.661222] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.661222] env[68233]: DEBUG nova.network.neutron [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 727.661222] env[68233]: DEBUG nova.objects.instance [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'info_cache' on Instance uuid 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 727.683270] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d3cc86a-4acf-4145-ae07-6eb5ce4e4a90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.697594] env[68233]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38e7fc7-9fc6-452d-9e97-c47c1945a51e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.735447] env[68233]: DEBUG nova.network.neutron [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.739805] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd2e203-63fa-4d64-ac07-607bd21a6925 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.748044] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb95cd40-0c81-4355-a1f4-dc77cbd89025 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.769569] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 727.798397] env[68233]: DEBUG nova.network.neutron [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 727.861054] env[68233]: INFO nova.compute.manager [-] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Took 1.42 seconds to deallocate network for instance. [ 728.164371] env[68233]: DEBUG nova.objects.base [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Object Instance<4a388705-7e00-45dc-8891-c6e587b1cdb8> lazy-loaded attributes: flavor,info_cache {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 728.289770] env[68233]: ERROR nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [req-4eac713a-d5ff-4784-9575-8611d1135e55] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4eac713a-d5ff-4784-9575-8611d1135e55"}]} [ 728.303334] env[68233]: DEBUG oslo_concurrency.lockutils [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] Releasing lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.304989] env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Received event network-vif-deleted-6a95e672-349a-487c-b8cb-da7a75d3ec95 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 728.304989] env[68233]: INFO nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Neutron deleted interface 6a95e672-349a-487c-b8cb-da7a75d3ec95; detaching it from the instance and deleting it from the info cache [ 728.304989] env[68233]: DEBUG nova.network.neutron [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.305305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.305460] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.318397] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 728.343019] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:786}} [ 728.343019] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 728.358394] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 728.366430] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.386481] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 728.456191] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "09e4644d-d845-47f4-8748-925f739863b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 728.456615] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.809750] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e04d8ee-8bc1-4f67-b720-edaafde2208b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.823852] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9ec3ebf-68e2-4fdf-ac05-7b14003daf6a 
{{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.856394] env[68233]: DEBUG nova.compute.manager [req-e53c3f77-b171-4306-ac99-d8b65302c8ae req-0b63be97-ddc8-4f7d-9784-c2a29e45f0c7 service nova] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Detach interface failed, port_id=6a95e672-349a-487c-b8cb-da7a75d3ec95, reason: Instance a5468df9-c54d-4014-8002-ef82f111a7a4 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 728.859526] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.904965] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337f958d-afd4-4636-ba62-38ba962cc16c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.914318] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010b1073-b775-46fa-9d4b-513a798d938e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.945956] env[68233]: DEBUG nova.network.neutron [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.949929] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074e23b2-38e4-4488-8467-a44fabae927c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.958309] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-77838744-5700-49fd-b34e-557c0c2751bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.972241] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.022687] env[68233]: DEBUG nova.network.neutron [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Updating instance_info_cache with network_info: [{"id": "1078efb6-b35d-496e-aeb6-08489c2bfbea", "address": "fa:16:3e:fc:b7:8f", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1078efb6-b3", "ovs_interfaceid": "1078efb6-b35d-496e-aeb6-08489c2bfbea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.453326] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.508703] env[68233]: DEBUG nova.scheduler.client.report [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 66 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 729.508703] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 66 to 67 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 729.508703] env[68233]: DEBUG nova.compute.provider_tree [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 729.525766] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "refresh_cache-f7a1bfc5-7141-4764-b3fe-08d06020209a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.526056] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance network_info: |[{"id": "1078efb6-b35d-496e-aeb6-08489c2bfbea", "address": "fa:16:3e:fc:b7:8f", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.112", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1078efb6-b3", "ovs_interfaceid": "1078efb6-b35d-496e-aeb6-08489c2bfbea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 729.526427] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:b7:8f', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1078efb6-b35d-496e-aeb6-08489c2bfbea', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.534309] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Creating folder: Project (eb0965969ed647038bea9f0388a2df05). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.534786] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-87a92e21-8c56-489e-9f45-735f3ae038c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.546794] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Created folder: Project (eb0965969ed647038bea9f0388a2df05) in parent group-v559223. [ 729.547021] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Creating folder: Instances. Parent ref: group-v559333. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.547261] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-478a6402-3cc3-4149-aa3d-876e4c923d54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.556655] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Created folder: Instances in parent group-v559333. [ 729.556874] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.557060] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 729.557254] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8af54532-aeab-4f5c-9b3e-e91f46578b94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.577327] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.577327] env[68233]: value = "task-2782102" [ 729.577327] env[68233]: _type = "Task" [ 729.577327] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.584905] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782102, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.013770] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.541s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.014234] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 730.017025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.960s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.018357] env[68233]: INFO nova.compute.claims [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 730.087337] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782102, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.462563] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 730.462955] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7d447aa-b355-435a-a44f-d1137cda6442 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.471215] env[68233]: DEBUG oslo_vmware.api [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 730.471215] env[68233]: value = "task-2782103" [ 730.471215] env[68233]: _type = "Task" [ 730.471215] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.479958] env[68233]: DEBUG oslo_vmware.api [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782103, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.523442] env[68233]: DEBUG nova.compute.utils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 730.526747] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 730.526916] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 730.568131] env[68233]: DEBUG nova.policy [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '47750dd9b4cd4900a2e10596d267bbf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd53bb0dba91d48ccb92d5fa899086f66', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 730.588263] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782102, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.833646] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Successfully created port: f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 730.981014] env[68233]: DEBUG oslo_vmware.api [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782103, 'name': PowerOnVM_Task, 'duration_secs': 0.401097} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.985412] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 730.985630] env[68233]: DEBUG nova.compute.manager [None req-ae07f9d5-9d14-4914-802e-048c51b17567 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.986437] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8862c195-2fb4-4905-bc4c-9dec2e08d67e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.027403] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 731.090351] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782102, 'name': CreateVM_Task, 'duration_secs': 1.325701} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.090351] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 731.090351] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.094072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.095109] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 731.095109] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40f75c51-acd2-4175-b941-059caba82a0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.099423] env[68233]: DEBUG oslo_vmware.api [None 
req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 731.099423] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52899b91-556e-e07e-ef1a-c4ab730ee11a" [ 731.099423] env[68233]: _type = "Task" [ 731.099423] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.108817] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52899b91-556e-e07e-ef1a-c4ab730ee11a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.511181] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9e2913a-1a9d-4ca1-8915-3f0218954ac4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.519562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0504e67-571c-4867-9cec-171d71e303f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.557441] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117466eb-4c71-4ce7-94f8-5ffb6d39a2eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.565875] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d2ea78-a23a-45eb-9b08-338ca12934fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.579770] env[68233]: DEBUG nova.compute.provider_tree [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.611031] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52899b91-556e-e07e-ef1a-c4ab730ee11a, 'name': SearchDatastore_Task, 'duration_secs': 0.012142} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.611085] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.611401] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.612677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.612677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.612677] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.612677] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f589d184-a1ab-4369-bc3f-10139db89ed5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.621443] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.621615] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.622454] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-497d06d0-0238-4700-9e6a-cc5b2ffb8122 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.627748] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 731.627748] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7a461-7a8d-8078-53a9-869747df9c4d" [ 731.627748] env[68233]: _type = "Task" [ 731.627748] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.635738] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7a461-7a8d-8078-53a9-869747df9c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.062476] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 732.085173] env[68233]: DEBUG nova.scheduler.client.report [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 732.090287] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False 
{{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 732.090518] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.090674] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 732.090852] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.091060] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 732.091158] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 732.091361] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 732.091521] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 732.091687] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 732.091848] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 732.092027] env[68233]: DEBUG nova.virt.hardware [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:581}} [ 732.093328] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b62a9a5-03e9-4039-8e58-f640868ebca2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.102131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d26a19-5566-4fb0-897c-3e3a826eeb13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.138252] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7a461-7a8d-8078-53a9-869747df9c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.008375} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.139065] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e53bd0e0-18bc-400a-95e9-f77b46664d95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.145159] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 732.145159] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527dfa59-0d27-e035-0c01-78e000be4e15" [ 732.145159] env[68233]: _type = "Task" [ 732.145159] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.154542] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527dfa59-0d27-e035-0c01-78e000be4e15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.264604] env[68233]: DEBUG nova.compute.manager [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received event network-vif-plugged-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 732.264604] env[68233]: DEBUG oslo_concurrency.lockutils [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 732.264604] env[68233]: DEBUG oslo_concurrency.lockutils [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.264604] env[68233]: DEBUG oslo_concurrency.lockutils [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.264604] env[68233]: DEBUG nova.compute.manager [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] No waiting events found dispatching network-vif-plugged-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 732.264982] env[68233]: WARNING nova.compute.manager [req-02f534ea-81f7-478b-b084-b0a3d6147084 req-bb36dd30-6304-4ca0-be25-3dd6c615a4c5 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received unexpected event network-vif-plugged-f7568651-e038-4f28-85d3-597b8faad3fb for instance with vm_state building and task_state spawning. 
[ 732.351979] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Successfully updated port: f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 732.597948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.597948] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 732.600424] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.286s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.600589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.602603] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.455s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.602784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.604473] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.353s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.604650] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 732.606263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.406s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 732.607621] env[68233]: INFO nova.compute.claims [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.636146] env[68233]: INFO nova.scheduler.client.report [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Deleted allocations for instance 636b6b36-3ab5-4851-a232-d27b54895595 [ 732.641804] env[68233]: INFO nova.scheduler.client.report [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Deleted allocations for instance 0f7d80d2-5c34-42f7-a14a-97f9625675a8 [ 732.660638] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527dfa59-0d27-e035-0c01-78e000be4e15, 'name': SearchDatastore_Task, 'duration_secs': 0.012077} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.662024] env[68233]: INFO nova.scheduler.client.report [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Deleted allocations for instance 88d67405-b8c6-484a-b178-68a8babb3708 [ 732.663209] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.663489] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f7a1bfc5-7141-4764-b3fe-08d06020209a/f7a1bfc5-7141-4764-b3fe-08d06020209a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.666382] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8b430a8-2fe3-443c-bd9f-493dda1c5b23 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.674509] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 732.674509] env[68233]: value = "task-2782104" [ 732.674509] env[68233]: _type = "Task" [ 732.674509] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.854857] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.855038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.855166] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 733.112488] env[68233]: DEBUG nova.compute.utils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 733.123115] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 733.123115] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 733.151268] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14ced1ee-c807-4afd-9d52-609724d9f2f9 tempest-ServerDiagnosticsNegativeTest-1422247078 tempest-ServerDiagnosticsNegativeTest-1422247078-project-member] Lock "636b6b36-3ab5-4851-a232-d27b54895595" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.457s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.160395] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c6f2021-3281-4c14-9ace-ed7587b93db5 tempest-InstanceActionsTestJSON-1758791882 tempest-InstanceActionsTestJSON-1758791882-project-member] Lock "0f7d80d2-5c34-42f7-a14a-97f9625675a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.298s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.169272] env[68233]: DEBUG nova.policy [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e8b0a88e3304675bd5910b4c4ce97f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5c12a9e11dd403dbe6bdc1c7793040a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 733.174675] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d69c1eeb-f795-4492-a59a-5a81ef525645 tempest-InstanceActionsNegativeTestJSON-1772991073 tempest-InstanceActionsNegativeTestJSON-1772991073-project-member] Lock "88d67405-b8c6-484a-b178-68a8babb3708" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.590s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 733.189749] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782104, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.417127] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 733.622733] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 733.696211] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668895} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.702404] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f7a1bfc5-7141-4764-b3fe-08d06020209a/f7a1bfc5-7141-4764-b3fe-08d06020209a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.702404] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.702404] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7e7ea35-b6ec-4485-9e2d-2cea95443aac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.714720] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 733.714720] env[68233]: value = "task-2782105" [ 733.714720] env[68233]: _type = "Task" [ 733.714720] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.727488] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782105, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.736827] env[68233]: DEBUG nova.network.neutron [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [{"id": "f7568651-e038-4f28-85d3-597b8faad3fb", "address": "fa:16:3e:e9:b0:f2", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7568651-e0", "ovs_interfaceid": "f7568651-e038-4f28-85d3-597b8faad3fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.806405] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Successfully created port: 95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.226016] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f185c2-1092-43f8-abd7-a2b40ce9d97d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.233394] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062779} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.234781] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 734.235646] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18d53fc-0890-49f3-81a2-172876a5d9b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.242442] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f99f24-9eed-49a6-9eeb-9aea44e2d949 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.246350] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.246350] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance network_info: |[{"id": "f7568651-e038-4f28-85d3-597b8faad3fb", "address": "fa:16:3e:e9:b0:f2", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7568651-e0", "ovs_interfaceid": "f7568651-e038-4f28-85d3-597b8faad3fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 734.256379] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:b0:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7568651-e038-4f28-85d3-597b8faad3fb', 'vif_model': 
'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 734.263861] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 734.272301] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] f7a1bfc5-7141-4764-b3fe-08d06020209a/f7a1bfc5-7141-4764-b3fe-08d06020209a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 734.273089] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 734.273325] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-394752f7-ab5c-4174-8a11-5b6d2535599a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.311615] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f276084-0966-4203-8507-419abf2b90e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.327996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfd9f6d0-466d-44ea-8d20-92473deefb6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.331660] env[68233]: DEBUG nova.compute.manager [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 734.331770] env[68233]: DEBUG nova.compute.manager [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing instance network info cache due to event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 734.331934] env[68233]: DEBUG oslo_concurrency.lockutils [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] Acquiring lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.332087] env[68233]: DEBUG oslo_concurrency.lockutils [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] Acquired lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.332249] env[68233]: DEBUG nova.network.neutron [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 734.335476] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 734.335476] env[68233]: value = "task-2782106" [ 734.335476] env[68233]: _type = "Task" [ 734.335476] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.342715] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.342715] env[68233]: value = "task-2782107" [ 734.342715] env[68233]: _type = "Task" [ 734.342715] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.344712] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e54515-29f8-463a-8279-fb7578631e74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.357972] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782106, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.362303] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782107, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.372459] env[68233]: DEBUG nova.compute.provider_tree [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 734.636623] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 734.667037] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 734.667382] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 734.667464] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 734.667603] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 
tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 734.672119] env[68233]: DEBUG nova.virt.hardware [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 734.672119] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e8ffc2-0219-495f-baf1-d29d3531fbb3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.681587] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d56eb5a-8eda-4192-ae45-fa842c3fe6eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.853381] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782106, 'name': ReconfigVM_Task, 'duration_secs': 0.408552} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.856696] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Reconfigured VM instance instance-00000026 to attach disk [datastore2] f7a1bfc5-7141-4764-b3fe-08d06020209a/f7a1bfc5-7141-4764-b3fe-08d06020209a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.856696] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-712c2c52-1a8a-4b44-a609-018267865948 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.863433] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782107, 'name': CreateVM_Task, 'duration_secs': 0.363145} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.865097] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 734.865698] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 734.865698] env[68233]: value = "task-2782108" [ 734.865698] env[68233]: _type = "Task" [ 734.865698] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.866473] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.866540] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.867018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 734.867683] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edcd7f0f-33f1-4a8c-9579-1e9c7efbfb8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.885748] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 
tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782108, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.885748] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 734.885748] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c8959-5198-eb44-ac79-705a5111e166" [ 734.885748] env[68233]: _type = "Task" [ 734.885748] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.891616] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c8959-5198-eb44-ac79-705a5111e166, 'name': SearchDatastore_Task, 'duration_secs': 0.009612} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.892120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.892221] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.892460] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.892613] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.892782] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.893344] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a62d4cf-d22b-4f6f-8125-36bdb4ec1fdc {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.905025] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.905215] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.905970] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4e4c4d3-e5a7-4a8f-8254-92cb357e123b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.911453] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 734.911453] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526ddf0a-00bb-b965-6b8b-92fbedcab14e" [ 734.911453] env[68233]: _type = "Task" [ 734.911453] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.919658] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526ddf0a-00bb-b965-6b8b-92fbedcab14e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.933503] env[68233]: DEBUG nova.scheduler.client.report [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 67 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 734.933793] env[68233]: DEBUG nova.compute.provider_tree [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 67 to 68 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 734.933981] env[68233]: DEBUG nova.compute.provider_tree [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 735.119723] env[68233]: DEBUG nova.objects.instance [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lazy-loading 'flavor' on Instance uuid 75f58a50-7891-42df-8820-c997300a3159 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.306350] env[68233]: DEBUG nova.network.neutron [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updated VIF entry in instance network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 735.306350] env[68233]: DEBUG nova.network.neutron [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [{"id": "f7568651-e038-4f28-85d3-597b8faad3fb", "address": "fa:16:3e:e9:b0:f2", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7568651-e0", "ovs_interfaceid": "f7568651-e038-4f28-85d3-597b8faad3fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.378406] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782108, 'name': Rename_Task, 'duration_secs': 0.138301} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.378406] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 735.378630] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12124992-ca7f-4b88-ad05-0f89ee232b19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.388567] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 735.388567] env[68233]: value = "task-2782109" [ 735.388567] env[68233]: _type = "Task" [ 735.388567] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.396388] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782109, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.421037] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526ddf0a-00bb-b965-6b8b-92fbedcab14e, 'name': SearchDatastore_Task, 'duration_secs': 0.009206} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.421861] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8821c1ee-d57e-43ae-85c1-45928703a797 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.428371] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 735.428371] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a215a-6f2c-5c82-eb49-ec79acb77c64" [ 735.428371] env[68233]: _type = "Task" [ 735.428371] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.435947] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a215a-6f2c-5c82-eb49-ec79acb77c64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.441011] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.834s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.441781] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 735.443868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.901s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 735.444058] env[68233]: DEBUG nova.objects.instance [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lazy-loading 'resources' on Instance uuid 769956c6-7824-41db-9779-fc1b5f53dd94 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.628779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.629009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.812928] env[68233]: DEBUG oslo_concurrency.lockutils [req-a752eab7-8611-4b73-bb6b-c5b5b2b4c1e0 req-409e435c-cd52-4231-9749-bc90b5f2debd service nova] Releasing lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.904459] env[68233]: DEBUG oslo_vmware.api [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782109, 'name': PowerOnVM_Task, 'duration_secs': 0.464878} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.904885] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.905216] env[68233]: INFO nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Took 10.40 seconds to spawn the instance on the hypervisor. 
[ 735.905507] env[68233]: DEBUG nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 735.906402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4574f87a-1f39-4e99-b565-378bc661d8de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.948168] env[68233]: DEBUG nova.compute.utils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 735.952762] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a215a-6f2c-5c82-eb49-ec79acb77c64, 'name': SearchDatastore_Task, 'duration_secs': 0.009286} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.954121] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 735.954396] env[68233]: DEBUG nova.network.neutron [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 735.957794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.957794] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 86528c8b-b51e-480d-a7bf-013d990d51ca/86528c8b-b51e-480d-a7bf-013d990d51ca.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.957794] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cfa5ccd5-b197-4c64-a99e-4598ae4ba94a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.966104] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 
tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 735.966104] env[68233]: value = "task-2782110" [ 735.966104] env[68233]: _type = "Task" [ 735.966104] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.977749] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782110, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.044674] env[68233]: DEBUG nova.policy [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '839b33e7aa11482882403ddc2319583f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '853a057cfba3400ba05c89cb1d292f61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 736.090668] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Successfully updated port: 95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 736.351644] env[68233]: DEBUG nova.network.neutron [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 736.430593] env[68233]: INFO nova.compute.manager [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Took 46.61 seconds to build instance. [ 736.455555] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 736.478621] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782110, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468522} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.479983] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 86528c8b-b51e-480d-a7bf-013d990d51ca/86528c8b-b51e-480d-a7bf-013d990d51ca.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 736.479983] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 736.479983] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5b3f190-6cc4-4479-96fd-15c24fcf3f47 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.487440] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 736.487440] env[68233]: value = "task-2782111" [ 736.487440] env[68233]: _type = "Task" [ 736.487440] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.504299] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782111, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.539178] env[68233]: DEBUG nova.compute.manager [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received event network-vif-plugged-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 736.540030] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Acquiring lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 736.540318] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 736.540621] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.542755] env[68233]: DEBUG nova.compute.manager [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] No waiting events found dispatching network-vif-plugged-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 736.542755] env[68233]: WARNING nova.compute.manager [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received unexpected event network-vif-plugged-95df51ae-391f-43ee-976f-70e2a4bb769a for instance with vm_state building and task_state spawning. [ 736.542755] env[68233]: DEBUG nova.compute.manager [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 736.542755] env[68233]: DEBUG nova.compute.manager [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing instance network info cache due to event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 736.542755] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Acquiring lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.542755] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Acquired lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.544098] env[68233]: DEBUG nova.network.neutron [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing network info cache for port 95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 736.596522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.630661] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabe783d-fbd4-46da-97d1-8ea14d65d02c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.643567] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534fb218-5675-4566-907d-265b353803f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.676282] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6e7e89-a68d-4ed4-89d9-eb8bca0087ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.683853] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9dd2f91-7ffd-4e70-9f2d-8fe2272dc8c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.698221] env[68233]: DEBUG nova.compute.provider_tree [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.932760] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fcebcc99-9361-4922-804f-60d338dd3fc2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.191s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.964915] env[68233]: DEBUG nova.network.neutron [None 
req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Successfully created port: 5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 737.009053] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.175493} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.009222] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.010131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67317082-0242-4555-a5f6-177f28cb1b7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.040801] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] 86528c8b-b51e-480d-a7bf-013d990d51ca/86528c8b-b51e-480d-a7bf-013d990d51ca.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.041527] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b06d49ef-5eee-49be-b7d5-2b5f43c4f0d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.064422] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 737.064422] env[68233]: value = "task-2782112" [ 737.064422] env[68233]: _type = "Task" [ 737.064422] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.074093] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.124619] env[68233]: DEBUG nova.network.neutron [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 737.201169] env[68233]: DEBUG nova.scheduler.client.report [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.435745] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.471199] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 737.510130] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 737.510433] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.510608] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 737.511145] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.511334] env[68233]: DEBUG nova.virt.hardware [None 
req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 737.511457] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 737.511664] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 737.511817] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 737.512164] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 737.513222] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 737.513469] env[68233]: DEBUG nova.virt.hardware [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 737.514680] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee299fe-2ff2-42ff-98f9-8e302cb968bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.524234] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3deb57f8-95c7-4bdb-8ed8-7405d8a2d170 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.574999] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782112, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.614756] env[68233]: DEBUG nova.network.neutron [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.646679] env[68233]: DEBUG nova.network.neutron [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.707805] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.264s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.710438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.666s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.711891] env[68233]: INFO nova.compute.claims [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.733282] env[68233]: INFO nova.scheduler.client.report [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 
tempest-ImagesOneServerTestJSON-1988231464-project-member] Deleted allocations for instance 769956c6-7824-41db-9779-fc1b5f53dd94 [ 737.807258] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "65f9fe09-97dc-4988-bae4-243d60e33be9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.807697] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.967401] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.078508] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782112, 'name': ReconfigVM_Task, 'duration_secs': 0.783173} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.080381] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Reconfigured VM instance instance-00000027 to attach disk [datastore2] 86528c8b-b51e-480d-a7bf-013d990d51ca/86528c8b-b51e-480d-a7bf-013d990d51ca.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.080381] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-904f9557-fefb-4ebd-9687-012372bb3a30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.088251] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 738.088251] env[68233]: value = "task-2782113" [ 738.088251] env[68233]: _type = "Task" [ 738.088251] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.103824] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782113, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.117704] env[68233]: DEBUG oslo_concurrency.lockutils [req-2567e2e3-8a1b-4947-9126-c80564680e05 req-04297eac-f929-4021-8969-03ba14c6c413 service nova] Releasing lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.119134] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquired lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.119134] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 738.152027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.152027] env[68233]: DEBUG nova.compute.manager [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Inject network info {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 738.152027] env[68233]: DEBUG nova.compute.manager [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] network_info to inject: |[{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 738.155980] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfiguring VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 738.158657] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ff427ce-d178-4199-8a8c-1c52278d8be8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.179787] env[68233]: DEBUG oslo_vmware.api [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 738.179787] env[68233]: value = "task-2782114" [ 738.179787] env[68233]: _type = "Task" [ 738.179787] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.188966] env[68233]: DEBUG oslo_vmware.api [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782114, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.244239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-89b09bf7-b099-4f1f-8109-7d44deea3e58 tempest-ImagesOneServerTestJSON-1988231464 tempest-ImagesOneServerTestJSON-1988231464-project-member] Lock "769956c6-7824-41db-9779-fc1b5f53dd94" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.269s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 738.603092] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782113, 'name': Rename_Task, 'duration_secs': 0.19949} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.603536] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.603879] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43910fb7-8c22-4e28-b7ac-ea5db58c7398 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.610513] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 738.610513] env[68233]: value = "task-2782115" [ 738.610513] env[68233]: _type = "Task" [ 738.610513] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.620333] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782115, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.638129] env[68233]: DEBUG nova.objects.instance [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lazy-loading 'flavor' on Instance uuid 75f58a50-7891-42df-8820-c997300a3159 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 738.667195] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 738.695581] env[68233]: DEBUG oslo_vmware.api [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782114, 'name': ReconfigVM_Task, 'duration_secs': 0.160921} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.701114] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-787b9314-a334-404d-a100-714ecdaa55c7 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfigured VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 738.911058] env[68233]: DEBUG nova.network.neutron [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [{"id": "95df51ae-391f-43ee-976f-70e2a4bb769a", "address": "fa:16:3e:24:9e:48", "network": {"id": "2e377502-5ec6-4c6f-9049-a2edcac7bac1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-584462734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5c12a9e11dd403dbe6bdc1c7793040a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95df51ae-39", "ovs_interfaceid": "95df51ae-391f-43ee-976f-70e2a4bb769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.997378] env[68233]: DEBUG nova.compute.manager [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 738.997631] env[68233]: DEBUG nova.compute.manager [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing instance network info cache due to event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 738.998491] env[68233]: DEBUG oslo_concurrency.lockutils [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.998491] env[68233]: DEBUG oslo_concurrency.lockutils [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.998491] env[68233]: DEBUG nova.network.neutron [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 739.073613] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.073897] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.130301] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782115, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.148684] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 739.392457] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3899fc-6d85-4d17-b849-0cc988826641 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.403022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f79c25f-d99e-444c-b966-62c5dec21084 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.438151] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Releasing lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 739.438793] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance network_info: |[{"id": "95df51ae-391f-43ee-976f-70e2a4bb769a", "address": "fa:16:3e:24:9e:48", "network": {"id": "2e377502-5ec6-4c6f-9049-a2edcac7bac1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-584462734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5c12a9e11dd403dbe6bdc1c7793040a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95df51ae-39", "ovs_interfaceid": "95df51ae-391f-43ee-976f-70e2a4bb769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 739.440129] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:9e:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '939c05b6-8f31-4f3a-95ac-6297e0bd243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '95df51ae-391f-43ee-976f-70e2a4bb769a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 739.447871] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Creating folder: Project (d5c12a9e11dd403dbe6bdc1c7793040a). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.448388] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6725d25b-ceec-4598-9da2-c2661b5e79fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.451628] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1a271d9-4ebe-4b92-a319-b90171a7b1d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.458866] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d267011-50cf-4d37-a53d-4fa7f820da00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.464146] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Created folder: Project (d5c12a9e11dd403dbe6bdc1c7793040a) in parent group-v559223. [ 739.464351] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Creating folder: Instances. Parent ref: group-v559337. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 739.464939] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-559d86ac-3475-4b46-a5c9-73e4be6e1f41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.474879] env[68233]: DEBUG nova.compute.provider_tree [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.485262] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Created folder: Instances in parent group-v559337. [ 739.485262] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.485262] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 739.485262] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00847706-56fa-4cbb-b629-f9554c8d373f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.508355] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 739.508355] env[68233]: value = "task-2782118" [ 739.508355] env[68233]: _type = "Task" [ 739.508355] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.517360] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782118, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.578596] env[68233]: DEBUG nova.compute.manager [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Received event network-vif-plugged-5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 739.579207] env[68233]: DEBUG oslo_concurrency.lockutils [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] Acquiring lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.579207] env[68233]: DEBUG oslo_concurrency.lockutils [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.579207] env[68233]: DEBUG oslo_concurrency.lockutils [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.579467] env[68233]: DEBUG nova.compute.manager [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] No waiting events found dispatching network-vif-plugged-5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 739.579518] env[68233]: WARNING nova.compute.manager [req-ef63f7d7-fe13-4596-9e35-cf7c780aa793 req-cef26e63-ae73-48c5-960c-a6820340f656 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Received unexpected event network-vif-plugged-5be37989-5969-49ee-9609-29f58ff75d61 for instance with vm_state building and task_state spawning. 
[ 739.619028] env[68233]: DEBUG nova.network.neutron [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Successfully updated port: 5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 739.626274] env[68233]: DEBUG oslo_vmware.api [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782115, 'name': PowerOnVM_Task, 'duration_secs': 0.8784} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.626666] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.626930] env[68233]: INFO nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 7.56 seconds to spawn the instance on the hypervisor. [ 739.627823] env[68233]: DEBUG nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.628232] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f90ae43-6f2b-4a47-9556-af7a9ff28e3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.778229] env[68233]: DEBUG nova.network.neutron [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updated VIF entry in instance network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.778656] env[68233]: DEBUG nova.network.neutron [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.978774] env[68233]: DEBUG nova.scheduler.client.report [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.019270] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782118, 'name': CreateVM_Task, 'duration_secs': 0.361238} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.019589] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 740.020660] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.020930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.021367] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 740.021790] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5df1087a-fdc2-4b05-bf92-145205d44c9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.028124] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 740.028124] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ee9759-41a6-e30b-dedb-d25f230fad70" [ 740.028124] env[68233]: _type = "Task" [ 740.028124] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.041445] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ee9759-41a6-e30b-dedb-d25f230fad70, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.041764] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.041988] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 740.042260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.042410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.042591] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 740.042880] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-389d7667-d6f1-400c-97fb-e912b42b9bf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.050414] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 740.051123] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 740.052605] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f80c9b8-8f3f-4202-aee5-bb39ca045232 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.058808] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 740.058808] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256b1cd-c369-be3d-b5f6-fe09837f40d0" [ 740.058808] env[68233]: _type = "Task" [ 740.058808] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.066580] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256b1cd-c369-be3d-b5f6-fe09837f40d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.122572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.122964] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.123240] env[68233]: DEBUG nova.network.neutron [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.149573] env[68233]: INFO nova.compute.manager [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 48.72 seconds to build instance. 
[ 740.284063] env[68233]: DEBUG oslo_concurrency.lockutils [req-02727ef5-7362-4fa0-81d0-c307aa16af01 req-4559e35f-2516-4810-a7bf-3a76bad69d05 service nova] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.284063] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 740.484837] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.774s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.485371] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.490031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.336s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.492023] env[68233]: INFO nova.compute.claims [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.573491] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256b1cd-c369-be3d-b5f6-fe09837f40d0, 'name': SearchDatastore_Task, 'duration_secs': 0.008609} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.578079] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1578fa87-3bce-433d-9075-14a5f4973121 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.589808] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 740.589808] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5290203b-9572-49c9-fac0-673f17eb34b4" [ 740.589808] env[68233]: _type = "Task" [ 740.589808] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.606751] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5290203b-9572-49c9-fac0-673f17eb34b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.606751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.607183] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd/6c34d7ce-7bf1-4f88-812f-adc1eb5353dd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 740.607183] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b77fddc-b24b-4d3a-9966-c4780b2f73be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.616756] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 740.616756] env[68233]: value = "task-2782119" [ 740.616756] env[68233]: _type = "Task" [ 740.616756] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.635507] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782119, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.651604] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9f605b64-d6dd-4636-a39c-7e436aa4f2f6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.810s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.698272] env[68233]: DEBUG nova.network.neutron [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 740.765206] env[68233]: DEBUG nova.network.neutron [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 740.942976] env[68233]: DEBUG nova.network.neutron [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Updating instance_info_cache with network_info: [{"id": "5be37989-5969-49ee-9609-29f58ff75d61", "address": "fa:16:3e:93:33:1a", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5be37989-59", "ovs_interfaceid": "5be37989-5969-49ee-9609-29f58ff75d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.992104] env[68233]: DEBUG nova.compute.utils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 740.993993] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 740.994288] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 741.129387] env[68233]: DEBUG nova.policy [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4510470db9247dd808c5d50a83a9758', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd920fb0b5ad9405ba979bcd84dd23948', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 741.145746] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782119, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486148} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.145746] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd/6c34d7ce-7bf1-4f88-812f-adc1eb5353dd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 741.145746] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 741.145746] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b14674d2-0120-4eac-b177-5a58ad553073 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.154632] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 741.159122] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 741.159122] env[68233]: value = "task-2782120" [ 741.159122] env[68233]: _type = "Task" [ 741.159122] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.172811] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.331628] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.332136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.450020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 741.450020] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance network_info: |[{"id": "5be37989-5969-49ee-9609-29f58ff75d61", "address": "fa:16:3e:93:33:1a", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5be37989-59", "ovs_interfaceid": 
"5be37989-5969-49ee-9609-29f58ff75d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 741.450020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:93:33:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5be37989-5969-49ee-9609-29f58ff75d61', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 741.456237] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.458894] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 741.459289] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f782c2dc-d9c9-4999-a1aa-fbcc1951fcf9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.486472] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 741.486472] env[68233]: value = "task-2782121" [ 741.486472] env[68233]: _type = "Task" [ 741.486472] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.494646] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782121, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.502573] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.518117] env[68233]: DEBUG nova.compute.manager [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 741.518857] env[68233]: DEBUG nova.compute.manager [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing instance network info cache due to event network-changed-cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 741.519193] env[68233]: DEBUG oslo_concurrency.lockutils [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] Acquiring lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.668576] env[68233]: DEBUG nova.network.neutron [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.681549] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06168} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.681549] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 741.681549] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0603769e-c76f-4121-b3ad-0ea9b95a19a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.709616] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd/6c34d7ce-7bf1-4f88-812f-adc1eb5353dd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 741.714340] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.715149] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78f56d2a-d52f-4f6f-ad72-e142e8ab59c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.738121] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 741.738121] env[68233]: value = "task-2782122" [ 741.738121] env[68233]: _type = "Task" [ 741.738121] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 741.747523] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782122, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.768536] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Successfully created port: ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 742.004521] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782121, 'name': CreateVM_Task, 'duration_secs': 0.358193} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.010703] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 742.011965] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.012422] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.013113] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 742.013183] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a5ab76b-bfa5-4900-b1f8-c87dac828cf6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.022701] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 742.022701] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5297c8a1-1ef7-5b71-4091-cdfe701ad73d" [ 742.022701] env[68233]: _type = "Task" [ 742.022701] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.032603] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5297c8a1-1ef7-5b71-4091-cdfe701ad73d, 'name': SearchDatastore_Task, 'duration_secs': 0.010358} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.032603] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.036009] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 742.036009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.036009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.036009] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 742.036009] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df22fa7d-333f-4b14-9b5c-58952f146c15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.045043] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 742.045043] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 742.045271] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43e96248-8de1-4da6-8268-175b8fade6a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.052561] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 742.052561] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a8f89-50c6-c0bd-206c-752550d2a867" [ 742.052561] env[68233]: _type = "Task" [ 742.052561] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.060470] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a8f89-50c6-c0bd-206c-752550d2a867, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.172425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 742.172781] env[68233]: DEBUG nova.compute.manager [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Inject network info {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 742.172973] env[68233]: DEBUG nova.compute.manager [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] network_info to inject: |[{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 742.178304] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfiguring VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 742.179543] env[68233]: DEBUG oslo_concurrency.lockutils [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] Acquired lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.179543] env[68233]: DEBUG nova.network.neutron [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Refreshing network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.181346] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-767fb28e-9140-4235-ba33-19dcd08e0ae9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.202771] env[68233]: DEBUG oslo_vmware.api [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 742.202771] env[68233]: value = "task-2782123" [ 742.202771] env[68233]: _type = "Task" [ 742.202771] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.211682] env[68233]: DEBUG oslo_vmware.api [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782123, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.213813] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58aa3a5-7028-46ed-9c56-4deb1af50223 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.220233] env[68233]: DEBUG nova.compute.manager [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Received event network-changed-5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 742.220859] env[68233]: DEBUG nova.compute.manager [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Refreshing instance network info cache due to event network-changed-5be37989-5969-49ee-9609-29f58ff75d61. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 742.221149] env[68233]: DEBUG oslo_concurrency.lockutils [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] Acquiring lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.221298] env[68233]: DEBUG oslo_concurrency.lockutils [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] Acquired lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 742.221459] env[68233]: DEBUG nova.network.neutron [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Refreshing network info cache for port 5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 742.226731] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d9a8a3-8d45-44d8-ac1a-88d58438873a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.261221] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60b86a0-b95e-4602-b411-8c133ba4484d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.271835] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782122, 'name': ReconfigVM_Task, 'duration_secs': 0.317507} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.274071] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd/6c34d7ce-7bf1-4f88-812f-adc1eb5353dd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 742.275183] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1528b53-0c6f-4010-ba5d-26bc149c53b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.278015] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756199e9-1fb6-435b-9b7e-2c923f39f7d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.293677] env[68233]: DEBUG nova.compute.provider_tree [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.296233] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 742.296233] env[68233]: value = "task-2782124" [ 742.296233] env[68233]: _type = "Task" [ 742.296233] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.304815] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782124, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.513434] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.533120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.533120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.549108] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.549540] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.549540] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.549649] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.550790] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.550790] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.550790] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.550790] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.551059] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.551320] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.551572] env[68233]: DEBUG nova.virt.hardware [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.553581] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56de156-1df6-4e1c-8d84-977f3306e977 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.565637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.565948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.573711] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46bf4cf-1521-4569-b6ad-d40afec36e57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.578009] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 
tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523a8f89-50c6-c0bd-206c-752550d2a867, 'name': SearchDatastore_Task, 'duration_secs': 0.010192} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.582102] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d93bbcac-7a34-496c-8f64-9c09f767dfb4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.594287] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 742.594287] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a2188-a2a2-89aa-ae99-78653b9daedd" [ 742.594287] env[68233]: _type = "Task" [ 742.594287] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.602262] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a2188-a2a2-89aa-ae99-78653b9daedd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.713440] env[68233]: DEBUG oslo_vmware.api [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782123, 'name': ReconfigVM_Task, 'duration_secs': 0.14056} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.713720] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8c189bda-2431-4399-ba9d-08d8d9d8781d tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Reconfigured VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 742.800892] env[68233]: DEBUG nova.scheduler.client.report [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 742.814867] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782124, 'name': Rename_Task, 'duration_secs': 0.144864} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.815353] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 742.816174] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f02973f-29da-4216-8867-cb6e4804cbd6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.825229] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 742.825229] env[68233]: value = "task-2782125" [ 742.825229] env[68233]: _type = "Task" [ 742.825229] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.836702] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782125, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.108425] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a2188-a2a2-89aa-ae99-78653b9daedd, 'name': SearchDatastore_Task, 'duration_secs': 0.024884} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.111161] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.111637] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2a88648c-f00d-4d7b-905d-e70c327e248a/2a88648c-f00d-4d7b-905d-e70c327e248a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 743.112238] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7dd9a71c-1a67-432e-aa63-a10230bb1747 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.122444] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 743.122444] env[68233]: value = "task-2782126" [ 743.122444] env[68233]: _type = "Task" [ 743.122444] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.132901] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.168469] env[68233]: DEBUG nova.network.neutron [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Updated VIF entry in instance network info cache for port 5be37989-5969-49ee-9609-29f58ff75d61. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 743.168912] env[68233]: DEBUG nova.network.neutron [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Updating instance_info_cache with network_info: [{"id": "5be37989-5969-49ee-9609-29f58ff75d61", "address": "fa:16:3e:93:33:1a", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5be37989-59", "ovs_interfaceid": "5be37989-5969-49ee-9609-29f58ff75d61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.201977] env[68233]: DEBUG nova.network.neutron [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updated VIF entry in instance network info cache for port cf641b62-960d-40ec-9fdd-3b4845dcf864. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 743.202418] env[68233]: DEBUG nova.network.neutron [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [{"id": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "address": "fa:16:3e:52:44:1b", "network": {"id": "0b928ae3-6037-4268-9c06-4e094ae0eac3", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1779341629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.144", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "91c6b7158b6c4082876f94b32495a113", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf641b62-96", "ovs_interfaceid": "cf641b62-960d-40ec-9fdd-3b4845dcf864", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.311064] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.819s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.311064] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 743.318301] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.748s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.320407] env[68233]: INFO nova.compute.claims [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.347154] env[68233]: DEBUG oslo_vmware.api [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782125, 'name': PowerOnVM_Task, 'duration_secs': 0.446054} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.347154] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 743.347154] env[68233]: INFO nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Took 8.71 seconds to spawn the instance on the hypervisor. [ 743.347154] env[68233]: DEBUG nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 743.348549] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d7d49ea-1648-4dfd-ae1b-57c622a85c54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.636164] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49089} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.636164] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2a88648c-f00d-4d7b-905d-e70c327e248a/2a88648c-f00d-4d7b-905d-e70c327e248a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 743.636164] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 743.636164] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dbeb8063-4db6-49d2-be7d-f5b7df1edac0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.640788] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 743.640788] env[68233]: value = "task-2782127" [ 743.640788] env[68233]: _type = "Task" [ 743.640788] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 743.649530] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782127, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.671844] env[68233]: DEBUG oslo_concurrency.lockutils [req-64c569ba-7bb6-4f9e-9f68-27a40c5d4529 req-1ed15f28-061b-4a37-993b-037636e15539 service nova] Releasing lock "refresh_cache-2a88648c-f00d-4d7b-905d-e70c327e248a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.705123] env[68233]: DEBUG oslo_concurrency.lockutils [req-102b015e-50bd-4ea9-beed-19f00aeda09e req-3f8a9232-5836-4b37-8a4b-4dd9e973ddac service nova] Releasing lock "refresh_cache-75f58a50-7891-42df-8820-c997300a3159" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 743.831022] env[68233]: DEBUG nova.compute.utils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 743.832827] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 743.833133] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 743.878337] env[68233]: INFO nova.compute.manager [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Took 51.86 seconds to build instance. [ 743.884846] env[68233]: DEBUG nova.policy [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26e97604a73649a5bb2ca442bab0d2bf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '489301846eb44533a5fbab92d9da4bf6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 743.979325] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "75f58a50-7891-42df-8820-c997300a3159" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.979582] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.979796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "75f58a50-7891-42df-8820-c997300a3159-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.979977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.980160] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 
tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.983278] env[68233]: INFO nova.compute.manager [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Terminating instance [ 744.154405] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782127, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.29115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.154814] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 744.156695] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2cf40e-4cbf-4561-b2d5-9a8f4e14cf60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.183152] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Reconfiguring VM instance instance-00000029 to attach disk [datastore2] 2a88648c-f00d-4d7b-905d-e70c327e248a/2a88648c-f00d-4d7b-905d-e70c327e248a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 744.183152] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2411d4c-c2ba-4ac1-91b3-0cbb4369b480 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.200787] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Successfully created port: bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.210851] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 744.210851] env[68233]: value = "task-2782128" [ 744.210851] env[68233]: _type = "Task" [ 744.210851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.220620] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782128, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.336078] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 744.381494] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ba89d42a-afa4-4814-945f-e681fdf1fea4 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.566s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.496020] env[68233]: DEBUG nova.compute.manager [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 744.496020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 744.496020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30321867-4a42-4019-88fc-2f8e9a53de85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.508907] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 744.508907] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58093631-60a5-45b1-b95f-9d867615c578 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.514460] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 744.514460] env[68233]: value = "task-2782129" [ 744.514460] env[68233]: _type = "Task" [ 744.514460] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.526536] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782129, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.650788] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Successfully updated port: ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 744.720427] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.893019] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 744.941393] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd00d98-1e65-4d95-95ff-c08a3680ae13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.951251] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b3c206-7e22-469b-8c99-f4c19e93320d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.985397] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1be1bb-ebc4-4f24-af3e-0427c2c3b76a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.994869] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0f3d68-36a5-4681-bfaa-145a3840df3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.012128] env[68233]: DEBUG nova.compute.provider_tree [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.023964] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782129, 'name': PowerOffVM_Task, 'duration_secs': 0.438121} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.024891] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 745.027012] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 745.027012] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9a19061-af73-45a4-be8e-c158d8e9429e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.113170] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 745.113460] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 745.113572] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Deleting the datastore file [datastore2] 75f58a50-7891-42df-8820-c997300a3159 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 745.113840] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95149716-a9b8-46d1-bcbe-b5fa606584ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.120723] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for the task: (returnval){ [ 745.120723] env[68233]: value = "task-2782131" [ 745.120723] env[68233]: _type = "Task" [ 745.120723] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.128219] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.159748] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.160142] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquired lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.160142] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 745.219887] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782128, 'name': ReconfigVM_Task, 'duration_secs': 0.748223} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.219887] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Reconfigured VM instance instance-00000029 to attach disk [datastore2] 2a88648c-f00d-4d7b-905d-e70c327e248a/2a88648c-f00d-4d7b-905d-e70c327e248a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 745.223533] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32e2c775-419c-473b-876f-40551be75dcb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.227877] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 745.227877] env[68233]: value = "task-2782132" [ 745.227877] env[68233]: _type = "Task" [ 745.227877] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.237548] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782132, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.348419] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 745.353450] env[68233]: DEBUG nova.compute.manager [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 745.353550] env[68233]: DEBUG nova.compute.manager [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing instance network info cache due to event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 745.354503] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.354503] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.354503] env[68233]: DEBUG nova.network.neutron [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.393019] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 745.393019] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.393019] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 745.393019] env[68233]: DEBUG nova.virt.hardware [None 
req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.393019] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 745.393488] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 745.393955] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 745.394300] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 745.397020] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 745.397020] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 745.397020] env[68233]: DEBUG nova.virt.hardware [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 745.399018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f69191-06b3-44d2-ad65-f0c1fc8c912b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.413079] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa2ad22-4a5e-4fd6-b153-c9f0213a8a74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.433429] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.515591] env[68233]: DEBUG 
nova.scheduler.client.report [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 745.638024] env[68233]: DEBUG oslo_vmware.api [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Task: {'id': task-2782131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16374} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.638024] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 745.638024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 745.638024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 745.638024] env[68233]: INFO nova.compute.manager [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] [instance: 75f58a50-7891-42df-8820-c997300a3159] Took 1.14 seconds to destroy the instance on the hypervisor. [ 745.638024] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.638024] env[68233]: DEBUG nova.compute.manager [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 745.638024] env[68233]: DEBUG nova.network.neutron [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 745.743746] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782132, 'name': Rename_Task, 'duration_secs': 0.368448} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.744140] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 745.744553] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-530ac21e-f503-4d65-b5dd-d68489d9287e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.751487] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 745.751487] env[68233]: value = "task-2782133" [ 745.751487] env[68233]: _type = "Task" [ 745.751487] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.760384] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.952235] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 746.022900] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.708s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.023504] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.031022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.394s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.031022] env[68233]: DEBUG nova.objects.instance [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lazy-loading 'resources' on Instance uuid 080ab438-269b-427a-9ee9-71c59d9c2a91 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 746.230513] env[68233]: DEBUG nova.network.neutron [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Updating instance_info_cache with network_info: [{"id": "ade59518-8bb1-4241-9622-856e6284b19f", "address": "fa:16:3e:d7:75:f4", "network": {"id": "50993831-346d-4def-9ea9-9558b12937a0", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2102301854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d920fb0b5ad9405ba979bcd84dd23948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapade59518-8b", "ovs_interfaceid": "ade59518-8bb1-4241-9622-856e6284b19f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.243296] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Successfully updated port: bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 746.267361] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782133, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.529302] env[68233]: DEBUG nova.compute.utils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 746.532888] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 746.533131] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 746.609689] env[68233]: DEBUG nova.policy [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '434654c75b9b4ddaaf3714b355c2a5bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd921fe0876de499dbc86529a00b2c6f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 746.734651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Releasing lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.734956] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Instance network_info: |[{"id": "ade59518-8bb1-4241-9622-856e6284b19f", "address": "fa:16:3e:d7:75:f4", "network": {"id": "50993831-346d-4def-9ea9-9558b12937a0", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2102301854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d920fb0b5ad9405ba979bcd84dd23948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapade59518-8b", "ovs_interfaceid": "ade59518-8bb1-4241-9622-856e6284b19f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 746.736320] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:75:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c6a4836-66dc-4e43-982b-f8fcd3f9989a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ade59518-8bb1-4241-9622-856e6284b19f', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 746.743418] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Creating folder: Project (d920fb0b5ad9405ba979bcd84dd23948). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.743706] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a83d3a7-eee5-4e40-a103-3d02d402b624 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.746107] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.746248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquired lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.746595] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 746.752747] env[68233]: DEBUG nova.network.neutron [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updated VIF entry in instance network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.753588] env[68233]: DEBUG nova.network.neutron [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.764156] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Created folder: Project (d920fb0b5ad9405ba979bcd84dd23948) in parent group-v559223. [ 746.764368] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Creating folder: Instances. Parent ref: group-v559341. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 746.765043] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e365cdcd-1f41-46f1-b0d0-8b60f16309a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.770454] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782133, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.784707] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Created folder: Instances in parent group-v559341. [ 746.784930] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 746.785133] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 746.785349] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2385871-a211-4329-ab9c-723e21d447b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.810508] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 746.810508] env[68233]: value = "task-2782136" [ 746.810508] env[68233]: _type = "Task" [ 746.810508] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.817706] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782136, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.040574] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.102648] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Successfully created port: ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.265240] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.265519] env[68233]: DEBUG nova.compute.manager [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Received event network-vif-plugged-ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 747.265676] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Acquiring lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.265871] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.266306] env[68233]: DEBUG oslo_concurrency.lockutils [req-1893f637-8bae-431a-9633-a52f21ca9b2c 
req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.266559] env[68233]: DEBUG nova.compute.manager [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] No waiting events found dispatching network-vif-plugged-ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.266740] env[68233]: WARNING nova.compute.manager [req-1893f637-8bae-431a-9633-a52f21ca9b2c req-15193083-2705-4b62-ac97-269d4b22ec7d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Received unexpected event network-vif-plugged-ade59518-8bb1-4241-9622-856e6284b19f for instance with vm_state building and task_state spawning. [ 747.271832] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782133, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.283846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f177d1-1eac-4066-8fa7-ce6de0a27f8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.291900] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608fa591-53b4-4097-a2c8-8adc59c1d35e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.328211] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 747.333783] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da30107a-de1e-4156-85c8-6b0287a597f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.342279] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782136, 'name': CreateVM_Task, 'duration_secs': 0.352421} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.344524] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 747.345379] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.345570] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.346177] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 747.347090] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f24d9a-9a8e-40aa-af71-f7b531daaef7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.351283] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0cae7f7-9c30-4b43-9fe7-542de1c8d15d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.363728] env[68233]: DEBUG nova.compute.provider_tree [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.368870] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 747.368870] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52114866-3356-7542-9350-a15bcd4bf00f" [ 747.368870] env[68233]: _type = "Task" [ 747.368870] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.378454] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52114866-3356-7542-9350-a15bcd4bf00f, 'name': SearchDatastore_Task, 'duration_secs': 0.010369} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.378454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.378646] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 747.380142] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.380142] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.380142] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 747.380142] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e866507a-d7aa-4561-ae9f-a5fb3055571a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.390152] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 747.390152] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 747.390152] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c2d8e10-0e4e-41fb-be1f-3c819d2b35c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.395736] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 747.395736] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea12ed-7048-5635-fc54-27e322b70048" [ 747.395736] env[68233]: _type = "Task" [ 747.395736] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.405224] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea12ed-7048-5635-fc54-27e322b70048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.574798] env[68233]: DEBUG nova.compute.manager [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Received event network-vif-plugged-bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 747.574798] env[68233]: DEBUG oslo_concurrency.lockutils [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] Acquiring lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.574798] env[68233]: DEBUG oslo_concurrency.lockutils [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.575042] env[68233]: DEBUG oslo_concurrency.lockutils [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.575375] env[68233]: DEBUG nova.compute.manager [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] No waiting events found dispatching network-vif-plugged-bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.575749] env[68233]: WARNING nova.compute.manager [req-07088247-bf28-4615-948e-7b7218467124 req-76c95f97-53ba-4175-83f2-afc3b1acb05a service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Received unexpected event 
network-vif-plugged-bc63fa79-ccc0-4ad9-b4df-185add5228eb for instance with vm_state building and task_state spawning. [ 747.629425] env[68233]: DEBUG nova.network.neutron [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updating instance_info_cache with network_info: [{"id": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "address": "fa:16:3e:21:98:88", "network": {"id": "df9005ef-abd2-4fe3-98ae-8765a54c74ef", "bridge": "br-int", "label": "tempest-ServersTestJSON-409220955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "489301846eb44533a5fbab92d9da4bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc63fa79-cc", "ovs_interfaceid": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.630847] env[68233]: DEBUG nova.network.neutron [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.767469] env[68233]: DEBUG oslo_vmware.api [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782133, 'name': PowerOnVM_Task, 'duration_secs': 1.65998} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.767742] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 747.767940] env[68233]: INFO nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Took 10.30 seconds to spawn the instance on the hypervisor. 
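The entries above are dominated by oslo.vmware's invoke-and-poll cycle: each "Invoking <ManagedObject>.<Method>" DEBUG line is a SOAP request issued through the API session created at start-up, and the "Waiting for the task" / "_poll_task ... progress is N%" pairs come from the session polling the returned Task object until vCenter reports it finished (the 'duration_secs' value in the final "completed successfully" record, e.g. PowerOnVM_Task above). As an illustration only, here is a minimal Python sketch of that pattern; the connected `session` object and the `vm_moref_value` argument are assumptions for the example, not values taken from this log.

    # Minimal sketch (not from this log) of the invoke/wait pattern behind the
    # "Invoking ...", "Waiting for the task" and "_poll_task ... progress" lines.
    # Assumes `session` is an already-connected oslo_vmware.api.VMwareAPISession,
    # like the one nova-compute establishes when the VMware driver loads, and
    # that `vm_moref_value` is a hypothetical VirtualMachine managed-object id.

    from oslo_vmware import vim_util

    def power_on_vm(session, vm_moref_value):
        """Invoke PowerOnVM_Task and block until vCenter reports completion."""
        vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')

        # invoke_api() issues the SOAP call (the "Invoking ..." DEBUG lines);
        # for a *_Task method the result is a Task managed-object reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

        # wait_for_task() polls the task on the session's poll interval (the
        # "_poll_task ... progress is N%" lines) and raises if the task errors;
        # on success it returns the final task info.
        return session.wait_for_task(task)

The same invoke/wait shape applies to the CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task records that follow in this log; only the invoked method and its arguments differ.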
[ 747.768139] env[68233]: DEBUG nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 747.768942] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032ae738-ee95-4db6-9fc7-fe663d0e6e95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.868387] env[68233]: DEBUG nova.scheduler.client.report [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 747.907102] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ea12ed-7048-5635-fc54-27e322b70048, 'name': SearchDatastore_Task, 'duration_secs': 0.013495} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.907829] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fb71afb-8b6f-4a70-82e9-19d85ecc472c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.914031] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 747.914031] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526200b8-9976-febd-8a2d-643783425347" [ 747.914031] env[68233]: _type = "Task" [ 747.914031] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.922235] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526200b8-9976-febd-8a2d-643783425347, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.055458] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.080682] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.081358] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.081358] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.081358] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.081550] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.081650] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.081904] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.082134] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.082304] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.082486] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.083199] env[68233]: DEBUG nova.virt.hardware [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.087024] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750464f8-51bd-4356-a237-c6fa4eb4d32c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.092690] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a193c1-4bbe-46eb-b439-56d77e36e7aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.134081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Releasing lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.134452] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Instance network_info: |[{"id": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "address": "fa:16:3e:21:98:88", "network": {"id": "df9005ef-abd2-4fe3-98ae-8765a54c74ef", "bridge": "br-int", "label": "tempest-ServersTestJSON-409220955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "489301846eb44533a5fbab92d9da4bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc63fa79-cc", "ovs_interfaceid": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 748.134823] env[68233]: INFO nova.compute.manager [-] [instance: 75f58a50-7891-42df-8820-c997300a3159] Took 2.50 seconds to deallocate network for instance. [ 748.135229] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:98:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '77aa121f-8fb6-42f3-aaea-43addfe449b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc63fa79-ccc0-4ad9-b4df-185add5228eb', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.145929] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Creating folder: Project (489301846eb44533a5fbab92d9da4bf6). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.147694] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac961136-9ce8-463c-82ec-3c109782f24c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.163457] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Created folder: Project (489301846eb44533a5fbab92d9da4bf6) in parent group-v559223. [ 748.163814] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Creating folder: Instances. Parent ref: group-v559344. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 748.163905] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d306bdba-6205-4585-8672-dd17d75dd15d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.178226] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Created folder: Instances in parent group-v559344. [ 748.178226] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 748.178226] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.178226] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72d5631c-96ef-4a90-b71d-881f428614f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.201269] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.201269] env[68233]: value = "task-2782139" [ 748.201269] env[68233]: _type = "Task" [ 748.201269] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.209759] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782139, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.288148] env[68233]: INFO nova.compute.manager [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Took 49.11 seconds to build instance. [ 748.373799] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.346s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.376683] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.499s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 748.377051] env[68233]: DEBUG nova.objects.instance [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lazy-loading 'resources' on Instance uuid b056fbf4-4873-4ec9-905a-ad973c8fb27a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 748.406342] env[68233]: INFO nova.scheduler.client.report [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleted allocations for instance 080ab438-269b-427a-9ee9-71c59d9c2a91 [ 748.429187] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526200b8-9976-febd-8a2d-643783425347, 'name': SearchDatastore_Task, 'duration_secs': 0.01217} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.429358] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.429617] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c8fd5539-8add-45fe-a0ac-8767bf8a330e/c8fd5539-8add-45fe-a0ac-8767bf8a330e.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 748.429882] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5df0cdf-4d5d-4783-9b50-590837852bf7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.437088] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 748.437088] env[68233]: value = "task-2782140" [ 748.437088] env[68233]: _type = "Task" [ 748.437088] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.446309] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782140, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.505160] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Received event network-changed-ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 748.505160] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Refreshing instance network info cache due to event network-changed-ade59518-8bb1-4241-9622-856e6284b19f. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 748.506028] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquiring lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.506323] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquired lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.506611] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Refreshing network info cache for port ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.656386] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 748.694704] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Successfully updated port: ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 748.708997] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782139, 'name': CreateVM_Task, 'duration_secs': 0.386604} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.709312] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 748.710044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.710417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.710674] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 748.711667] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a65e07b9-ccc1-4fc4-9557-b5fd8afde32b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.717456] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 748.717456] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f4d92-f3f1-10e8-b930-bd9c4821fd5f" [ 748.717456] env[68233]: _type = "Task" [ 748.717456] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.728329] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f4d92-f3f1-10e8-b930-bd9c4821fd5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.790096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-db0fe668-6a19-4ff3-8fdb-0b1f80870343 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.985s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.924838] env[68233]: DEBUG oslo_concurrency.lockutils [None req-08d3ed70-ce24-4cc1-a6a1-93bf0edf4065 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "080ab438-269b-427a-9ee9-71c59d9c2a91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.457s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 748.956602] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782140, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.163941] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.163941] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.197970] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.197970] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.197970] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 749.229851] env[68233]: DEBUG 
oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f4d92-f3f1-10e8-b930-bd9c4821fd5f, 'name': SearchDatastore_Task, 'duration_secs': 0.056612} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.232176] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.232482] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.232752] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.232906] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.233120] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.233547] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65fb8a0d-6818-49a3-8970-ea572ac60312 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.242765] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.242987] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.243718] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c01a223-99b0-4037-b3e9-c38be5662f9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.250439] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 749.250439] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dbdf1b-5f67-463b-a3e0-94897b6a73f1" [ 749.250439] env[68233]: _type = "Task" [ 749.250439] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.259204] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dbdf1b-5f67-463b-a3e0-94897b6a73f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.262168] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Updated VIF entry in instance network info cache for port ade59518-8bb1-4241-9622-856e6284b19f. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.262466] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Updating instance_info_cache with network_info: [{"id": "ade59518-8bb1-4241-9622-856e6284b19f", "address": "fa:16:3e:d7:75:f4", "network": {"id": "50993831-346d-4def-9ea9-9558b12937a0", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-2102301854-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d920fb0b5ad9405ba979bcd84dd23948", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapade59518-8b", "ovs_interfaceid": "ade59518-8bb1-4241-9622-856e6284b19f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.295565] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 749.450049] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559589} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.452696] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c8fd5539-8add-45fe-a0ac-8767bf8a330e/c8fd5539-8add-45fe-a0ac-8767bf8a330e.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 749.452918] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 749.453382] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-005d8508-561d-45ea-96e2-bea2fdb9efee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.460504] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 749.460504] env[68233]: value = "task-2782141" [ 749.460504] env[68233]: _type = "Task" [ 749.460504] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.470725] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782141, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.491419] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1413bbc-b612-4206-83b1-8915907e929d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.499378] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61d653af-3cf4-4044-88b7-641dd6445fc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.533350] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945d37fc-a4b6-459b-a0c2-bdec5ccd6a74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.542044] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9e99c6-67ea-43d3-a7e2-f0926920f518 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.555764] env[68233]: DEBUG nova.compute.provider_tree [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.729987] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 749.761947] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dbdf1b-5f67-463b-a3e0-94897b6a73f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011159} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.762673] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74fc9a15-0767-4093-b79d-99647ae58e89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.765206] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Releasing lock "refresh_cache-c8fd5539-8add-45fe-a0ac-8767bf8a330e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.765353] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 749.765518] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing instance network info cache due to event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 749.765718] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.765855] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.766034] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.772364] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 749.772364] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f2f6cb-1511-40e0-347d-7c19aa676a7d" [ 749.772364] env[68233]: _type = "Task" [ 749.772364] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.781505] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f2f6cb-1511-40e0-347d-7c19aa676a7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.818623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.874860] env[68233]: DEBUG nova.network.neutron [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Updating instance_info_cache with network_info: [{"id": "ab957e11-50f4-459b-92be-c9bd72946850", "address": "fa:16:3e:23:85:50", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab957e11-50", "ovs_interfaceid": "ab957e11-50f4-459b-92be-c9bd72946850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.973629] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782141, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061474} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.973850] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 749.974643] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bc69e4-4a7c-48d8-af81-dcff2de30f52 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.999062] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Reconfiguring VM instance instance-0000002a to attach disk [datastore2] c8fd5539-8add-45fe-a0ac-8767bf8a330e/c8fd5539-8add-45fe-a0ac-8767bf8a330e.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 749.999379] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b9c7ebb-9e3e-475f-a989-d5e90ab78ddb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.022552] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 750.022552] env[68233]: value = "task-2782142" [ 750.022552] env[68233]: _type = "Task" [ 750.022552] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.032327] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782142, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.059639] env[68233]: DEBUG nova.scheduler.client.report [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 750.284301] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f2f6cb-1511-40e0-347d-7c19aa676a7d, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.284914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.284914] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db/876d428d-d5c9-422a-aba2-2d6c61b092db.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.285146] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9bb330d6-8a82-40fd-9892-77e4a94d41a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.291738] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 750.291738] env[68233]: value = "task-2782143" [ 750.291738] env[68233]: _type = "Task" [ 750.291738] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.299735] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782143, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.379131] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.379131] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Instance network_info: |[{"id": "ab957e11-50f4-459b-92be-c9bd72946850", "address": "fa:16:3e:23:85:50", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab957e11-50", "ovs_interfaceid": "ab957e11-50f4-459b-92be-c9bd72946850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 750.379420] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:85:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab957e11-50f4-459b-92be-c9bd72946850', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 750.388464] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 750.388962] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 750.389897] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4077bbb-e4b4-42c7-a91b-b3025591ad0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.415303] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 750.415303] env[68233]: value = "task-2782144" [ 750.415303] env[68233]: _type = "Task" [ 750.415303] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.425154] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782144, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.536746] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782142, 'name': ReconfigVM_Task, 'duration_secs': 0.307612} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.537390] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Reconfigured VM instance instance-0000002a to attach disk [datastore2] c8fd5539-8add-45fe-a0ac-8767bf8a330e/c8fd5539-8add-45fe-a0ac-8767bf8a330e.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 750.538261] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0d3ce29-808f-4efd-acf8-84bf8c3a77fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.546259] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 750.546259] env[68233]: value = "task-2782145" [ 750.546259] env[68233]: _type = "Task" [ 750.546259] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.556830] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782145, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.565519] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.569219] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.993s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.571368] env[68233]: INFO nova.compute.claims [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.593672] env[68233]: INFO nova.scheduler.client.report [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Deleted allocations for instance b056fbf4-4873-4ec9-905a-ad973c8fb27a [ 750.611685] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updated VIF entry in instance network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.611685] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.731026] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Received event network-changed-bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 750.731026] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Refreshing instance network info cache due to event network-changed-bc63fa79-ccc0-4ad9-b4df-185add5228eb. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 750.731026] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Acquiring lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.731026] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Acquired lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.731026] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Refreshing network info cache for port bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 750.744188] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b640cc5a-1f83-46ff-ba84-ce1ecd3e8a87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.755556] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Suspending the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 750.756014] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3ee79354-d0a0-443a-90c2-f2f16969e98c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.764921] env[68233]: DEBUG oslo_vmware.api [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 750.764921] env[68233]: value = "task-2782146" [ 750.764921] env[68233]: _type = "Task" [ 750.764921] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.775880] env[68233]: DEBUG oslo_vmware.api [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782146, 'name': SuspendVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.803823] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782143, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.933032] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782144, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.060047] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782145, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.106393] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d0ea5a5-26d5-49fe-99ab-020f0b0d6269 tempest-DeleteServersAdminTestJSON-945753015 tempest-DeleteServersAdminTestJSON-945753015-project-member] Lock "b056fbf4-4873-4ec9-905a-ad973c8fb27a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.794s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.112844] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.113310] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.113610] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing instance network info cache due to event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 751.114277] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquiring lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.114680] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Acquired lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.115040] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.206978] env[68233]: DEBUG nova.compute.manager [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.207308] env[68233]: DEBUG nova.compute.manager [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing instance network info cache due to event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 751.207623] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Acquiring lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.207869] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Acquired lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.208279] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing network info cache for port 95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.279957] env[68233]: DEBUG oslo_vmware.api [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782146, 'name': SuspendVM_Task} progress is 54%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.306967] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520201} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.306967] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db/876d428d-d5c9-422a-aba2-2d6c61b092db.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.306967] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.306967] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a1c9627-915c-4a7e-bc89-3df6f1c4d679 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.314218] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 751.314218] env[68233]: value = "task-2782147" [ 751.314218] env[68233]: _type = "Task" [ 751.314218] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.323989] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782147, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.429762] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782144, 'name': CreateVM_Task, 'duration_secs': 0.766567} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.430406] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.431258] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.431623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.432080] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.432867] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88f448a7-e8c6-4056-a058-1942db537dd8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.437447] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 751.437447] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6db-52ff-91c4-e685-7722d1f85577" [ 751.437447] env[68233]: _type = "Task" [ 751.437447] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.446446] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6db-52ff-91c4-e685-7722d1f85577, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.476341] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updated VIF entry in instance network info cache for port bc63fa79-ccc0-4ad9-b4df-185add5228eb. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.476341] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updating instance_info_cache with network_info: [{"id": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "address": "fa:16:3e:21:98:88", "network": {"id": "df9005ef-abd2-4fe3-98ae-8765a54c74ef", "bridge": "br-int", "label": "tempest-ServersTestJSON-409220955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "489301846eb44533a5fbab92d9da4bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc63fa79-cc", "ovs_interfaceid": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.504308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.504308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.504308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.504308] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.504308] env[68233]: DEBUG 
oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.506855] env[68233]: INFO nova.compute.manager [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Terminating instance [ 751.560031] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782145, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.783232] env[68233]: DEBUG oslo_vmware.api [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782146, 'name': SuspendVM_Task} progress is 54%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.824374] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158272} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.827274] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.828430] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d4d0d9-e470-42c7-a822-d3de23ae8e49 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.859287] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Reconfiguring VM instance instance-0000002b to attach disk [datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db/876d428d-d5c9-422a-aba2-2d6c61b092db.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.868172] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b5b06df-f84d-4d42-8ce7-655e7a72aec3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.891087] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 751.891087] env[68233]: value = "task-2782148" [ 751.891087] env[68233]: _type = "Task" [ 751.891087] 
env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.899431] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.922626] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updated VIF entry in instance network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 751.923746] env[68233]: DEBUG nova.network.neutron [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [{"id": "f7568651-e038-4f28-85d3-597b8faad3fb", "address": "fa:16:3e:e9:b0:f2", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7568651-e0", "ovs_interfaceid": "f7568651-e038-4f28-85d3-597b8faad3fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.950874] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527fb6db-52ff-91c4-e685-7722d1f85577, 'name': SearchDatastore_Task, 'duration_secs': 0.009523} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.955017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.955331] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 751.955637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.955850] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.956481] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 751.957830] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b71cd3a-b741-4b27-94b8-097732500989 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.966574] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 751.967166] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 751.968261] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-641599a0-78b4-491c-9269-d61b1396f617 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.975543] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 751.975543] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c5f041-0ac8-7ce6-5b08-24da947491c8" [ 751.975543] env[68233]: _type = "Task" [ 751.975543] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.979623] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Releasing lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.979877] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Received event network-vif-plugged-ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.980094] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Acquiring lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.980424] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.980526] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.980696] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] No waiting events found dispatching network-vif-plugged-ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 751.980860] env[68233]: WARNING nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Received unexpected event network-vif-plugged-ab957e11-50f4-459b-92be-c9bd72946850 for instance with vm_state building and task_state spawning. 
[ 751.981041] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Received event network-changed-ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 751.981212] env[68233]: DEBUG nova.compute.manager [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Refreshing instance network info cache due to event network-changed-ab957e11-50f4-459b-92be-c9bd72946850. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 751.981406] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Acquiring lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.981542] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Acquired lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.981707] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Refreshing network info cache for port ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.987528] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c5f041-0ac8-7ce6-5b08-24da947491c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.011308] env[68233]: DEBUG nova.compute.manager [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 752.011536] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 752.012435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad96a751-17c0-4d9f-bbb5-cb31d2277d2c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.024018] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 752.024018] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9030ea55-a776-4b71-873e-cc9be0cc6e0f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.028835] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 752.028835] env[68233]: value = "task-2782149" [ 752.028835] env[68233]: _type = "Task" [ 752.028835] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.037638] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782149, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.058202] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782145, 'name': Rename_Task, 'duration_secs': 1.362763} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.062581] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.065600] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55d6af9c-7fe9-41ff-96a4-3b868c055db0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.072471] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 752.072471] env[68233]: value = "task-2782150" [ 752.072471] env[68233]: _type = "Task" [ 752.072471] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.083199] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.278892] env[68233]: DEBUG oslo_vmware.api [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782146, 'name': SuspendVM_Task, 'duration_secs': 1.113938} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.279629] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Suspended the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 752.279973] env[68233]: DEBUG nova.compute.manager [None req-f0b1e3a8-9cc6-4e6e-b5f5-aaa608cf15a3 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 752.281254] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6eda82-a20e-4ff6-99a2-c591470b3312 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.292929] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc66b36-ab19-494a-b118-7cce543800e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.304507] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba43c72-09ba-41a6-b12a-5914a01cc5b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.336201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b8cf70-d4b7-4b75-92f8-2b68577c810f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.343141] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e8847f-c568-4327-bce3-64cd6e6185f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.358656] env[68233]: DEBUG nova.compute.provider_tree [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.400545] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782148, 'name': ReconfigVM_Task, 'duration_secs': 0.361706} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.401360] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updated VIF entry in instance network info cache for port 95df51ae-391f-43ee-976f-70e2a4bb769a. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.401685] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [{"id": "95df51ae-391f-43ee-976f-70e2a4bb769a", "address": "fa:16:3e:24:9e:48", "network": {"id": "2e377502-5ec6-4c6f-9049-a2edcac7bac1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-584462734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.217", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5c12a9e11dd403dbe6bdc1c7793040a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95df51ae-39", "ovs_interfaceid": "95df51ae-391f-43ee-976f-70e2a4bb769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.406023] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Reconfigured VM instance instance-0000002b to attach disk [datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db/876d428d-d5c9-422a-aba2-2d6c61b092db.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.406023] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a0994b7-7fa9-46df-9693-3cbdf247b0a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.409459] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 752.409459] env[68233]: value = "task-2782151" [ 752.409459] env[68233]: _type = "Task" [ 752.409459] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.421923] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782151, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.426530] env[68233]: DEBUG oslo_concurrency.lockutils [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] Releasing lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.426767] env[68233]: DEBUG nova.compute.manager [req-111f18a2-29db-4a8b-a89e-437d896e2b31 req-4eb776fb-6333-401a-9013-b84b91d028d1 service nova] [instance: 75f58a50-7891-42df-8820-c997300a3159] Received event network-vif-deleted-cf641b62-960d-40ec-9fdd-3b4845dcf864 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 752.492281] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c5f041-0ac8-7ce6-5b08-24da947491c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010864} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.494295] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2431a673-072a-406d-a6cd-006f9fa4a69e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.499774] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 752.499774] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2dcc7-751b-273c-063a-bd5c1b037286" [ 752.499774] env[68233]: _type = "Task" [ 752.499774] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.509692] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2dcc7-751b-273c-063a-bd5c1b037286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.538767] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782149, 'name': PowerOffVM_Task, 'duration_secs': 0.230523} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.539076] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 752.539256] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 752.539508] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-861756e2-2431-4700-baf5-bf446bf30382 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.582850] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782150, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.602119] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 752.602346] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 752.602514] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleting the datastore file [datastore2] 86528c8b-b51e-480d-a7bf-013d990d51ca {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.602788] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3dc76004-e4d2-46dc-af8d-0c6d8cdcb088 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.609249] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 752.609249] env[68233]: value = "task-2782153" [ 752.609249] env[68233]: _type = "Task" [ 752.609249] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.616815] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782153, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.861905] env[68233]: DEBUG nova.scheduler.client.report [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 752.904855] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Releasing lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.905250] env[68233]: DEBUG nova.compute.manager [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 752.905487] env[68233]: DEBUG nova.compute.manager [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing instance network info cache due to event network-changed-f7568651-e038-4f28-85d3-597b8faad3fb. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 752.905782] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Acquiring lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.905982] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Acquired lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.906226] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Refreshing network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.922781] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782151, 'name': Rename_Task, 'duration_secs': 0.134311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.923239] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.923503] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24f9050d-4ab3-482f-b96d-1179feaf34f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.931271] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 752.931271] env[68233]: value = "task-2782154" [ 752.931271] env[68233]: _type = "Task" [ 752.931271] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.940411] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.954790] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Updated VIF entry in instance network info cache for port ab957e11-50f4-459b-92be-c9bd72946850. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.955317] env[68233]: DEBUG nova.network.neutron [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Updating instance_info_cache with network_info: [{"id": "ab957e11-50f4-459b-92be-c9bd72946850", "address": "fa:16:3e:23:85:50", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab957e11-50", "ovs_interfaceid": "ab957e11-50f4-459b-92be-c9bd72946850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.009618] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2dcc7-751b-273c-063a-bd5c1b037286, 'name': SearchDatastore_Task, 'duration_secs': 0.02239} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.009901] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.010187] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] dcd8cca2-b62c-44a6-9e77-f336d2d39c09/dcd8cca2-b62c-44a6-9e77-f336d2d39c09.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.010473] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca216faa-c60a-43f0-a50e-8a2f283a55d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.016498] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 753.016498] env[68233]: value = "task-2782155" [ 753.016498] env[68233]: _type = "Task" [ 753.016498] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.025972] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782155, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.083246] env[68233]: DEBUG oslo_vmware.api [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782150, 'name': PowerOnVM_Task, 'duration_secs': 0.816762} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.084302] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.084302] env[68233]: INFO nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Took 10.57 seconds to spawn the instance on the hypervisor. 
[ 753.084302] env[68233]: DEBUG nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.084727] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b30d5f-8c58-481b-996d-889d0355fbbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.121911] env[68233]: DEBUG oslo_vmware.api [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782153, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294166} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.124342] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 753.124342] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 753.124342] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 753.124342] env[68233]: INFO nova.compute.manager [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 1.11 seconds to destroy the instance on the hypervisor. [ 753.124342] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 753.125117] env[68233]: DEBUG nova.compute.manager [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 753.125184] env[68233]: DEBUG nova.network.neutron [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 753.366876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 753.367436] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 753.371041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.225s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 753.372606] env[68233]: INFO nova.compute.claims [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.445858] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782154, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.458488] env[68233]: DEBUG oslo_concurrency.lockutils [req-b4d3651f-16a8-49be-be06-2ee2a4809782 req-28648683-293e-49be-858a-c2c4a70f7af9 service nova] Releasing lock "refresh_cache-dcd8cca2-b62c-44a6-9e77-f336d2d39c09" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.528601] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782155, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.605722] env[68233]: INFO nova.compute.manager [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Took 39.59 seconds to build instance. 
[ 753.667400] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updated VIF entry in instance network info cache for port f7568651-e038-4f28-85d3-597b8faad3fb. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.667768] env[68233]: DEBUG nova.network.neutron [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [{"id": "f7568651-e038-4f28-85d3-597b8faad3fb", "address": "fa:16:3e:e9:b0:f2", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7568651-e0", "ovs_interfaceid": "f7568651-e038-4f28-85d3-597b8faad3fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.882679] env[68233]: DEBUG nova.compute.utils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 753.885883] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 753.885883] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 753.927932] env[68233]: DEBUG nova.policy [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43c48242abf540fe99d95f3d2df541ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baab6817c97645bcae2e08502b7f96db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 753.945684] env[68233]: DEBUG oslo_vmware.api [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782154, 'name': PowerOnVM_Task, 'duration_secs': 0.957555} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.946347] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.946795] env[68233]: INFO nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Took 8.60 seconds to spawn the instance on the hypervisor. [ 753.948073] env[68233]: DEBUG nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.948073] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694b9594-e7fa-4e6e-b1be-224ca2e8308b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.010196] env[68233]: DEBUG nova.network.neutron [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.032289] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782155, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.845028} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.032289] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] dcd8cca2-b62c-44a6-9e77-f336d2d39c09/dcd8cca2-b62c-44a6-9e77-f336d2d39c09.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.032533] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.032772] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-463551dd-6183-4dc3-9ee7-bc07cece0150 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.039948] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 754.039948] env[68233]: value = "task-2782156" [ 754.039948] env[68233]: _type = "Task" [ 754.039948] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.048574] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782156, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.110865] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62dbf20f-0052-4ace-8e71-1bbe93d84391 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 90.198s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.143304] env[68233]: DEBUG nova.compute.manager [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 754.143603] env[68233]: DEBUG nova.compute.manager [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing instance network info cache due to event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 754.143981] env[68233]: DEBUG oslo_concurrency.lockutils [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.143981] env[68233]: DEBUG oslo_concurrency.lockutils [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.144738] env[68233]: DEBUG nova.network.neutron [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 754.170840] env[68233]: DEBUG oslo_concurrency.lockutils [req-482ff405-21a2-4565-b8c0-023415cdf067 req-d9878091-b026-46ad-ae7d-8a10b37160b2 service nova] Releasing lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.225853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "3c9b701e-6461-45e3-8654-3291c5a487b9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.226141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.226355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.226804] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.226987] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.229566] env[68233]: INFO nova.compute.manager [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Terminating instance [ 754.239232] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Successfully created port: f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.392673] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 754.477559] env[68233]: INFO nova.compute.manager [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Took 39.35 seconds to build instance. [ 754.513355] env[68233]: INFO nova.compute.manager [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 1.39 seconds to deallocate network for instance. [ 754.554012] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782156, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066997} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.554012] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.554758] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f22988-9e75-4c96-a30e-577eccf1ada3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.581225] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] dcd8cca2-b62c-44a6-9e77-f336d2d39c09/dcd8cca2-b62c-44a6-9e77-f336d2d39c09.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.585145] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c4217a49-b0b5-448f-bfb8-a1b79c45dca2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.607255] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 754.607255] env[68233]: value = "task-2782157" [ 754.607255] env[68233]: _type = "Task" [ 754.607255] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.615751] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 754.618496] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782157, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.676457] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "da2a5acb-0861-4225-a6b4-324482c480ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.676621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.734852] env[68233]: DEBUG nova.compute.manager [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 754.734852] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 754.735124] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a8f23cce-627b-46f4-b8c3-07f3e0359334 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.743380] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 754.743380] env[68233]: value = "task-2782158" [ 754.743380] env[68233]: _type = "Task" [ 754.743380] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.754684] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782158, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.986220] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce5f1805-2bb6-4013-a691-d19d0b027d3f tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 89.361s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.020842] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.113350] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66111ddd-3868-4164-8d05-7902d92994b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.121211] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782157, 'name': ReconfigVM_Task, 'duration_secs': 0.424768} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.125625] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Reconfigured VM instance instance-0000002c to attach disk [datastore2] dcd8cca2-b62c-44a6-9e77-f336d2d39c09/dcd8cca2-b62c-44a6-9e77-f336d2d39c09.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.128351] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62c84114-ad0a-40b0-a27b-2f0fd6f096bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.132576] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a088f0c-d116-46b5-88d8-0ea12f67e57a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.172329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.172329] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8224a013-65bd-4897-86d6-4384798abe29 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.174310] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 
tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 755.174310] env[68233]: value = "task-2782159" [ 755.174310] env[68233]: _type = "Task" [ 755.174310] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.181294] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9a6941-67e4-40df-bb1b-0d6229b886c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.188547] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782159, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.199734] env[68233]: DEBUG nova.compute.provider_tree [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 755.254793] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782158, 'name': PowerOffVM_Task, 'duration_secs': 0.43125} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.255066] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 755.255364] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 755.255476] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559266', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'name': 'volume-9bb63a6b-3e52-4693-a250-876762d38f26', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c9b701e-6461-45e3-8654-3291c5a487b9', 'attached_at': '', 'detached_at': '', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'serial': '9bb63a6b-3e52-4693-a250-876762d38f26'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 755.259285] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3195809c-c0a3-4ab8-aafc-bf548cba7863 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.275110] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73105247-f71e-4108-93c2-66f06edd6604 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.279918] env[68233]: DEBUG nova.network.neutron [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updated VIF entry in instance network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 755.280286] env[68233]: DEBUG nova.network.neutron [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.284108] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b7bf79-0dc0-4da7-85d1-14c0478a0f20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.305538] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd9c79d-6c5a-41a9-939b-a1cf7016d4d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.319186] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] The volume has not been displaced from its original location: [datastore2] volume-9bb63a6b-3e52-4693-a250-876762d38f26/volume-9bb63a6b-3e52-4693-a250-876762d38f26.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 755.325213] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Reconfiguring VM instance instance-0000001e to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 755.325911] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-221a12e5-f0a7-4faa-a459-83ef792c90eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.343519] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 755.343519] env[68233]: value = "task-2782160" [ 755.343519] env[68233]: _type = "Task" [ 755.343519] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.353980] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782160, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.406595] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 755.432423] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 755.432423] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.432423] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 755.432667] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.432667] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 755.433685] env[68233]: DEBUG nova.virt.hardware [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 755.434468] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a8f4ac-3f9b-4347-94b1-fec84356cb9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.442978] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6c6b9c-cabd-4959-af46-e9b5aa22169c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.488851] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.687039] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782159, 'name': Rename_Task, 'duration_secs': 0.177477} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.687404] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.687718] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58379560-63a6-49f8-837a-4a37ba5b30ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.694419] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 755.694419] env[68233]: value = "task-2782161" [ 755.694419] env[68233]: _type = "Task" [ 755.694419] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.707400] env[68233]: DEBUG nova.scheduler.client.report [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 755.711101] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782161, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.761209] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Successfully updated port: f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.784289] env[68233]: DEBUG oslo_concurrency.lockutils [req-c99bb3f3-1ff2-4f40-829b-b7cf9fdf09c2 req-585635a6-7493-4021-8dc2-5b4f6224d91b service nova] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.854080] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782160, 'name': ReconfigVM_Task, 'duration_secs': 0.304525} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.854365] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Reconfigured VM instance instance-0000001e to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 755.859728] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cde925fb-9d72-4522-b7f4-049ebc8c79bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.876908] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 755.876908] env[68233]: value = "task-2782162" [ 755.876908] env[68233]: _type = "Task" [ 755.876908] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.887665] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782162, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.910403] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "interface-c8fd5539-8add-45fe-a0ac-8767bf8a330e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.910759] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "interface-c8fd5539-8add-45fe-a0ac-8767bf8a330e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.911105] env[68233]: DEBUG nova.objects.instance [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lazy-loading 'flavor' on Instance uuid c8fd5539-8add-45fe-a0ac-8767bf8a330e {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 756.009170] env[68233]: DEBUG nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.009170] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87b905d-0a51-4c74-bfdc-b46882e30f49 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.029598] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.204393] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782161, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.213455] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.842s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.213976] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 756.217306] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.472s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.217539] env[68233]: DEBUG nova.objects.instance [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'resources' on Instance uuid 5ed44950-8e9b-4f42-9611-d5bff01dc905 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 756.266815] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.266983] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.267188] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.387133] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782162, 'name': ReconfigVM_Task, 'duration_secs': 0.186596} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.387705] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559266', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'name': 'volume-9bb63a6b-3e52-4693-a250-876762d38f26', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '3c9b701e-6461-45e3-8654-3291c5a487b9', 'attached_at': '', 'detached_at': '', 'volume_id': '9bb63a6b-3e52-4693-a250-876762d38f26', 'serial': '9bb63a6b-3e52-4693-a250-876762d38f26'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 756.387904] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.388523] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b277cb1-0fa1-4abf-b720-6884c9bfce9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.395178] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.395597] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9921f7a-0cf2-43a3-b3fd-22d3c5370736 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.414733] env[68233]: DEBUG nova.objects.instance [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lazy-loading 'pci_requests' on Instance uuid c8fd5539-8add-45fe-a0ac-8767bf8a330e {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 756.466561] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.466828] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.466945] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 
tempest-ServersTestBootFromVolume-705875082-project-member] Deleting the datastore file [datastore2] 3c9b701e-6461-45e3-8654-3291c5a487b9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.467226] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc10c1bb-8d08-4bed-bd09-924910e83713 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.474510] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for the task: (returnval){ [ 756.474510] env[68233]: value = "task-2782164" [ 756.474510] env[68233]: _type = "Task" [ 756.474510] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.482992] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782164, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.528209] env[68233]: INFO nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] instance snapshotting [ 756.528531] env[68233]: WARNING nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 756.532956] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c755f39-1575-4fa7-b2ba-06d80b53ffef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.555693] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1265912b-cdd4-43ae-b013-0e55496ec8fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.704673] env[68233]: DEBUG oslo_vmware.api [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782161, 'name': PowerOnVM_Task, 'duration_secs': 0.54351} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.705072] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.705171] env[68233]: INFO nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Took 8.65 seconds to spawn the instance on the hypervisor. 
[ 756.705337] env[68233]: DEBUG nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.706136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aeb00c-1c8c-419f-b059-4625f4c38b85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.718874] env[68233]: DEBUG nova.compute.utils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 756.720047] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 756.720211] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.770835] env[68233]: DEBUG nova.policy [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da4cb00bd4c3405c88d8616b66b71e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14d2a0ead80a4efba8420023c31f8f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 756.818705] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.917426] env[68233]: DEBUG nova.objects.base [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 756.917643] env[68233]: DEBUG nova.network.neutron [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 756.984993] env[68233]: DEBUG oslo_vmware.api [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Task: {'id': task-2782164, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131468} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.985440] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 756.985633] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 756.985810] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 756.986125] env[68233]: INFO nova.compute.manager [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Took 2.25 seconds to destroy the instance on the hypervisor. [ 756.986390] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 756.986630] env[68233]: DEBUG nova.compute.manager [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 756.986809] env[68233]: DEBUG nova.network.neutron [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.067168] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 757.069821] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6b9dc864-0bb7-4506-b627-b7851a2224f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.078899] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 757.078899] env[68233]: value = "task-2782165" [ 757.078899] env[68233]: _type = "Task" [ 757.078899] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.086145] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd4fe57b-101f-4648-ab06-6920f5c52410 tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "interface-c8fd5539-8add-45fe-a0ac-8767bf8a330e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.175s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.088410] env[68233]: DEBUG nova.network.neutron [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Updating instance_info_cache with network_info: [{"id": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "address": "fa:16:3e:14:3b:db", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6902fc8-b8", "ovs_interfaceid": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.095853] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782165, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.161797] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Successfully created port: f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 757.224382] env[68233]: INFO nova.compute.manager [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Took 41.67 seconds to build instance. [ 757.225485] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 757.363682] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a98f0f-b259-4b67-b399-56bf9e35b11a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.372778] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f39ac9-d63c-4251-8e30-e5dbfa9116f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.414439] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3749710-d022-4b81-a533-8a6db6cb48ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.423464] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e77de70-2bbb-40bd-a13e-c9d92fbcbf20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.437847] env[68233]: DEBUG nova.compute.provider_tree [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.594394] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.594684] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 
tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Instance network_info: |[{"id": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "address": "fa:16:3e:14:3b:db", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6902fc8-b8", "ovs_interfaceid": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 757.595070] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782165, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.595889] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:3b:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6902fc8-b8ad-4c4c-8056-c9fbf48669b9', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 757.603494] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.603708] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 757.603926] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69d86f2e-0d60-4bb1-87f1-096fbc22e613 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.624592] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 757.624592] env[68233]: value = "task-2782166" [ 757.624592] env[68233]: _type = "Task" [ 757.624592] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.632745] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782166, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.731405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a88a28c-b902-4ebb-96ac-6befb4ffcae1 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.205s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.839015] env[68233]: DEBUG nova.compute.manager [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Received event network-vif-plugged-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 757.839015] env[68233]: DEBUG oslo_concurrency.lockutils [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] Acquiring lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.839015] env[68233]: DEBUG oslo_concurrency.lockutils [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.839015] env[68233]: DEBUG oslo_concurrency.lockutils [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.839389] env[68233]: DEBUG nova.compute.manager [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] No waiting events found dispatching network-vif-plugged-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 757.839389] env[68233]: WARNING nova.compute.manager [req-dc3b8659-c061-4cac-a059-f2629e54c06e req-6cb7bc48-5749-4fc3-af62-d4adadb33f8c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Received unexpected event network-vif-plugged-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 for instance with vm_state building and task_state spawning. 
[ 757.940727] env[68233]: DEBUG nova.scheduler.client.report [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 758.094265] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782165, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.134795] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782166, 'name': CreateVM_Task, 'duration_secs': 0.342405} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.134971] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 758.135677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.135846] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.136195] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 758.136451] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a7f319e-8fab-4591-954a-3c32a2132c57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.140882] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 758.140882] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52715db8-27b1-61bc-916f-2bba846c23db" [ 758.140882] env[68233]: _type = "Task" [ 758.140882] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.148947] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52715db8-27b1-61bc-916f-2bba846c23db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.163697] env[68233]: DEBUG nova.network.neutron [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.165837] env[68233]: DEBUG nova.compute.manager [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Received event network-vif-deleted-f7568651-e038-4f28-85d3-597b8faad3fb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 758.166069] env[68233]: DEBUG nova.compute.manager [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 758.166199] env[68233]: DEBUG nova.compute.manager [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing instance network info cache due to event network-changed-2dd0d9e1-b8b6-464b-a497-b32b7ff64400. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 758.166419] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] Acquiring lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.166518] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] Acquired lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.166674] env[68233]: DEBUG nova.network.neutron [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Refreshing network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.237797] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 758.241494] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.269794] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 758.270053] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.270215] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 758.270436] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.270586] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 758.270736] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 758.270943] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 758.271343] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 758.271558] env[68233]: DEBUG 
nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 758.271732] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 758.272320] env[68233]: DEBUG nova.virt.hardware [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 758.273077] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a37f74-9849-499b-8edd-9597ea930977 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.283032] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32efa16b-7e70-42ef-9c32-7ff72e26b33f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.401693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.446351] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.229s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.449965] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.160s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.451353] env[68233]: INFO nova.compute.claims [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.489017] env[68233]: INFO nova.scheduler.client.report [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 5ed44950-8e9b-4f42-9611-d5bff01dc905 [ 758.593421] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c 
tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782165, 'name': CreateSnapshot_Task, 'duration_secs': 1.218319} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.593855] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 758.594634] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9693732-afb8-4310-8033-0acbd1338dc1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.655451] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52715db8-27b1-61bc-916f-2bba846c23db, 'name': SearchDatastore_Task, 'duration_secs': 0.010744} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.655719] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.656251] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 758.656461] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.656631] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.656787] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 758.657328] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a2ab50d-d1c9-4fd0-9aea-1d96a17d5f1b 
{{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.666725] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 758.666923] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 758.667660] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27934c1c-1121-4873-8784-f9298b00f995 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.670057] env[68233]: INFO nova.compute.manager [-] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Took 1.68 seconds to deallocate network for instance. [ 758.677652] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 758.677652] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe3763-5929-8e0c-c53e-2118ece2e57f" [ 758.677652] env[68233]: _type = "Task" [ 758.677652] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.687653] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe3763-5929-8e0c-c53e-2118ece2e57f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.764364] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.960394] env[68233]: DEBUG nova.network.neutron [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updated VIF entry in instance network info cache for port 2dd0d9e1-b8b6-464b-a497-b32b7ff64400. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.960394] env[68233]: DEBUG nova.network.neutron [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [{"id": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "address": "fa:16:3e:0a:06:90", "network": {"id": "2a7ad23f-7cca-4365-ab42-f36ad6738c15", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-37403553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d53bb0dba91d48ccb92d5fa899086f66", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2dd0d9e1-b8", "ovs_interfaceid": "2dd0d9e1-b8b6-464b-a497-b32b7ff64400", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.999337] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e526fb03-8959-4eb0-bdf4-2d33ef43aecc tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "5ed44950-8e9b-4f42-9611-d5bff01dc905" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.722s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.027666] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Successfully updated port: f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 759.114587] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 759.114587] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5e4d76a6-f681-41af-b5ac-fb2697030ba5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.123622] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 759.123622] env[68233]: value = "task-2782167" [ 759.123622] env[68233]: _type = "Task" [ 759.123622] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.133509] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.187781] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe3763-5929-8e0c-c53e-2118ece2e57f, 'name': SearchDatastore_Task, 'duration_secs': 0.012376} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.188964] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f67d5e75-f9a4-41d7-859a-351bd11d2d0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.195014] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 759.195014] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff0b17-fe0d-4960-f0a7-875a9033a171" [ 759.195014] env[68233]: _type = "Task" [ 759.195014] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.203141] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff0b17-fe0d-4960-f0a7-875a9033a171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.249975] env[68233]: INFO nova.compute.manager [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Took 0.58 seconds to detach 1 volumes for instance. 
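[editorial aside] The recurring "Waiting for the task: (returnval){ ... }" and "Task: {'id': task-..., 'name': ...} progress is N%" entries around here are the driver's poll-until-complete loop: nova submits a vCenter task (CloneVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, ...) and then repeatedly re-reads its state, logging progress, until it reports success or error. Below is a minimal, self-contained Python sketch of that polling pattern only; all names in it (TaskInfo, poll_task, fetch_task_info) are hypothetical illustrations and it is not the oslo.vmware implementation used in these logs.

    # Illustrative sketch of the poll-until-complete pattern visible in the
    # "Waiting for the task ..." / "progress is N%" log entries above.
    # TaskInfo, poll_task and fetch_task_info are made-up names for this sketch.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        task_id: str
        state: str       # "running", "success" or "error"
        progress: int    # 0-100

    def poll_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out.

        fetch_task_info is any callable returning the current TaskInfo for
        task_id (in a real driver this would be a server-side status query).
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info.state == "success":
                print(f"Task {task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

    # Usage with a fake backend that finishes after three polls, mimicking the
    # 0% -> 94% -> success progression seen for task-2782167 in the log.
    if __name__ == "__main__":
        states = iter([TaskInfo("task-2782167", "running", 0),
                       TaskInfo("task-2782167", "running", 94),
                       TaskInfo("task-2782167", "success", 100)])
        poll_task(lambda _id: next(states), "task-2782167", interval=0.01)

[end editorial aside]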
[ 759.253363] env[68233]: DEBUG nova.compute.manager [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Deleting volume: 9bb63a6b-3e52-4693-a250-876762d38f26 {{(pid=68233) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 759.466756] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ce3da24-9dd6-47da-82fe-71cb7d80811e req-db178453-5c91-4e9a-87c9-81879e83cd1a service nova] Releasing lock "refresh_cache-ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.530257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.530435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.530600] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 759.638521] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.663406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.663653] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.663859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.663955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.664443] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.669935] env[68233]: INFO nova.compute.manager [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Terminating instance [ 759.708989] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff0b17-fe0d-4960-f0a7-875a9033a171, 'name': SearchDatastore_Task, 'duration_secs': 0.012791} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.709830] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.709830] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] abdf9de2-8563-4a31-91a3-0c18b0387533/abdf9de2-8563-4a31-91a3-0c18b0387533.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 759.710074] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52ff04bb-3f4a-422d-9857-38d2f63ae65a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.717054] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 759.717054] env[68233]: value = "task-2782169" [ 759.717054] env[68233]: _type = "Task" [ 759.717054] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.725788] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782169, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.809847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.009826] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8083aa-ae3a-4a4f-9334-83db85c35920 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.019277] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8effa4ef-e527-477c-bc0c-bf3b824239d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.056978] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4498d87a-25ae-4b49-9871-25665b96fa5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.065400] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b97cb09-8710-4f2d-8dbf-e1493f336662 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.080514] env[68233]: DEBUG nova.compute.provider_tree [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.105247] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 760.136906] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.139972] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Received event network-changed-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 760.140295] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Refreshing instance network info cache due to event network-changed-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 760.140592] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Acquiring lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.140739] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Acquired lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.140901] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Refreshing network info cache for port f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.173611] env[68233]: DEBUG nova.compute.manager [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 760.173868] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 760.174849] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4dd40f5-e85b-43ef-b460-b9764ea17354 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.184119] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 760.184567] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7af9e0d7-3cfe-471a-a95e-1d2de970cb28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.191916] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 760.191916] env[68233]: value = "task-2782170" [ 760.191916] env[68233]: _type = "Task" [ 760.191916] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.203064] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782170, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.228971] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782169, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.458303] env[68233]: DEBUG nova.compute.manager [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 760.458922] env[68233]: DEBUG nova.compute.manager [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing instance network info cache due to event network-changed-95df51ae-391f-43ee-976f-70e2a4bb769a. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 760.459534] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Acquiring lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.459888] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Acquired lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.460224] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Refreshing network info cache for port 95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 760.522710] env[68233]: DEBUG nova.network.neutron [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [{"id": "f8c8623e-abba-4da9-8ab2-20413bb09889", "address": "fa:16:3e:36:c6:63", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8c8623e-ab", "ovs_interfaceid": "f8c8623e-abba-4da9-8ab2-20413bb09889", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.583975] env[68233]: DEBUG nova.scheduler.client.report [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 760.638079] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.704518] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782170, 'name': PowerOffVM_Task, 'duration_secs': 0.368368} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.704798] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 760.704966] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 760.705432] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef1630c5-1a37-4f68-afca-c81fe85f9519 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.728075] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782169, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695287} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.728365] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] abdf9de2-8563-4a31-91a3-0c18b0387533/abdf9de2-8563-4a31-91a3-0c18b0387533.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 760.728582] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 760.728839] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09521e6d-cd9c-42af-a828-94ad1bda5c9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.736537] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 760.736537] env[68233]: value = "task-2782172" [ 760.736537] env[68233]: _type = "Task" [ 760.736537] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.745828] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782172, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.887030] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Updated VIF entry in instance network info cache for port f6902fc8-b8ad-4c4c-8056-c9fbf48669b9. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 760.887205] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Updating instance_info_cache with network_info: [{"id": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "address": "fa:16:3e:14:3b:db", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6902fc8-b8", "ovs_interfaceid": "f6902fc8-b8ad-4c4c-8056-c9fbf48669b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.894888] env[68233]: DEBUG nova.compute.manager [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.895258] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.895614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.895920] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.896238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock 
"9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.896531] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.899829] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec13eff1-5da7-4bc8-bccf-42b5c410376d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.905436] env[68233]: INFO nova.compute.manager [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Terminating instance [ 761.029304] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.029304] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Instance network_info: |[{"id": "f8c8623e-abba-4da9-8ab2-20413bb09889", "address": "fa:16:3e:36:c6:63", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8c8623e-ab", "ovs_interfaceid": "f8c8623e-abba-4da9-8ab2-20413bb09889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 761.029304] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:c6:63', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f8c8623e-abba-4da9-8ab2-20413bb09889', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.039044] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating folder: Project (14d2a0ead80a4efba8420023c31f8f11). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 761.040667] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ce873d0-5f0f-441a-a1a9-357f6b1ce67b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.056066] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created folder: Project (14d2a0ead80a4efba8420023c31f8f11) in parent group-v559223. [ 761.056066] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating folder: Instances. Parent ref: group-v559351. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 761.056066] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de4db6a9-59e1-4891-b4ad-ef4ee8dcf022 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.065207] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created folder: Instances in parent group-v559351. [ 761.065931] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.065931] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 761.066086] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb72e732-e56b-4b5d-a712-fb4f2fd39316 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.090099] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.090099] env[68233]: value = "task-2782175" [ 761.090099] env[68233]: _type = "Task" [ 761.090099] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.090099] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.640s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.090331] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 761.096659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.715s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.096909] env[68233]: DEBUG nova.objects.instance [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lazy-loading 'resources' on Instance uuid 35cbc15b-48d8-4acd-a957-eec3421df1ce {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 761.105165] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.139695] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.247767] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.279736} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.248236] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.249066] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6386ea9-a6ec-4e0a-a151-3455870adbca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.273591] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] abdf9de2-8563-4a31-91a3-0c18b0387533/abdf9de2-8563-4a31-91a3-0c18b0387533.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.274653] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updated VIF entry in instance network info cache for port 95df51ae-391f-43ee-976f-70e2a4bb769a. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 761.275075] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [{"id": "95df51ae-391f-43ee-976f-70e2a4bb769a", "address": "fa:16:3e:24:9e:48", "network": {"id": "2e377502-5ec6-4c6f-9049-a2edcac7bac1", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-584462734-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5c12a9e11dd403dbe6bdc1c7793040a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "939c05b6-8f31-4f3a-95ac-6297e0bd243e", "external-id": "nsx-vlan-transportzone-825", "segmentation_id": 825, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95df51ae-39", "ovs_interfaceid": "95df51ae-391f-43ee-976f-70e2a4bb769a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.276233] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3d1d6e1-8b0e-439d-98c1-f5cb337c2f8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.308603] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 761.308603] env[68233]: value = "task-2782176" [ 761.308603] env[68233]: _type = "Task" [ 761.308603] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.323517] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782176, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.390855] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Releasing lock "refresh_cache-abdf9de2-8563-4a31-91a3-0c18b0387533" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.391143] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Received event network-vif-deleted-1ac399a3-6f36-48cc-8104-c828a414b1b0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.391398] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Received event network-vif-plugged-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.391624] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Acquiring lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.391886] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.392105] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.392280] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] No waiting events found dispatching network-vif-plugged-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.392495] env[68233]: WARNING nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] 
Received unexpected event network-vif-plugged-f8c8623e-abba-4da9-8ab2-20413bb09889 for instance with vm_state building and task_state spawning. [ 761.392680] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Received event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.392827] env[68233]: DEBUG nova.compute.manager [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing instance network info cache due to event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 761.393061] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Acquiring lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.393236] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Acquired lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.393402] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.410442] env[68233]: DEBUG nova.compute.manager [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.410505] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.411796] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233f3cde-1069-4691-b9e2-c6348708c78a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.423187] env[68233]: INFO nova.compute.manager [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] instance snapshotting [ 761.423187] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.423187] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98e564ae-8e40-43f3-ac20-d8aab3f601d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.425360] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297d5a1f-09c8-4cb8-b4fb-a5bc87c8360b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.446975] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44839bc1-93bf-483c-a922-b4dea0c00645 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.449818] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 761.449818] env[68233]: value = "task-2782177" [ 761.449818] env[68233]: _type = "Task" [ 761.449818] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.456397] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.456522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.456764] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.457513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.457513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.465028] env[68233]: INFO nova.compute.manager [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Terminating instance [ 761.467498] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782177, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.602745] env[68233]: DEBUG nova.compute.utils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 761.607436] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.608453] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 761.608634] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 761.615653] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.616298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.639328] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.653186] env[68233]: DEBUG nova.policy [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bc3bd33b2e2143f8be165a10e4665c7c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '963898fb1cae4e6e9438ace9dd437f9e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 761.801913] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Releasing lock "refresh_cache-6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.802224] env[68233]: DEBUG nova.compute.manager [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Received event network-changed-bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 761.802398] env[68233]: DEBUG nova.compute.manager [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Refreshing instance network info cache due to event network-changed-bc63fa79-ccc0-4ad9-b4df-185add5228eb. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 761.802617] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Acquiring lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.803240] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Acquired lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.803240] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Refreshing network info cache for port bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.823415] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782176, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.967340] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782177, 'name': PowerOffVM_Task, 'duration_secs': 0.318733} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.967340] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 761.967340] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 761.967340] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 761.967340] env[68233]: DEBUG nova.compute.manager [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 761.967340] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 761.967678] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-24697954-317f-4820-bf2f-9d49244da54f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.969907] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-202bc70a-924a-4ded-ae51-207e95b79476 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.975285] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fa49ec-3da6-4fcc-b88b-3c6ccc52a7da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.989137] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 761.994051] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41f7d89c-a2fc-4b7c-9de4-5966e058939c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.994247] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 761.994247] env[68233]: value = "task-2782178" [ 761.994247] env[68233]: _type = "Task" [ 761.994247] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.006409] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 762.006409] env[68233]: value = "task-2782180" [ 762.006409] env[68233]: _type = "Task" [ 762.006409] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.027666] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782178, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.037514] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782180, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.111312] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.114667] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.129279] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Successfully created port: 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 762.149278] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.329721] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782176, 'name': ReconfigVM_Task, 'duration_secs': 0.6834} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.329721] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Reconfigured VM instance instance-0000002d to attach disk [datastore1] abdf9de2-8563-4a31-91a3-0c18b0387533/abdf9de2-8563-4a31-91a3-0c18b0387533.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.329721] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5358b86f-1956-4a87-8d78-f76d1083a26b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.340739] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 762.340739] env[68233]: value = "task-2782181" [ 762.340739] env[68233]: _type = "Task" [ 762.340739] env[68233]: } to complete. 
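[editor's note] The repeated "Task: {...} progress is N%" entries are oslo.vmware's task poller at work: a vSphere call such as PowerOffVM_Task returns a Task managed-object reference, and the session polls it until it finishes. A hedged sketch of that call pattern; the host, credentials and vm_ref are placeholders, and this is not the surrounding Nova code itself.

from oslo_vmware import api

# Establishing the session logs into vCenter; all values here are placeholders.
session = api.VMwareAPISession("vc.example.test", "admin", "secret",
                               api_retry_count=3, task_poll_interval=0.5)

def power_off(vm_ref):
    # invoke_api returns the Task moref (the '_type = "Task"' blobs above);
    # wait_for_task polls it, logging progress, until it succeeds or raises.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)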
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.360634] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782181, 'name': Rename_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.362252] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updated VIF entry in instance network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.362945] env[68233]: DEBUG nova.network.neutron [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [{"id": "f8c8623e-abba-4da9-8ab2-20413bb09889", "address": "fa:16:3e:36:c6:63", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8c8623e-ab", "ovs_interfaceid": "f8c8623e-abba-4da9-8ab2-20413bb09889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.367532] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b33b22-45c6-4bac-9c4d-39565ff006fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.381783] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d8d460-2578-49c2-abce-4a7d1f81cbdc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.416130] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3206bbc-ef1c-4914-8fd4-43130b58bc72 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.429557] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4de61fb-2d67-47cc-9b5f-9299cb8d2b50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.446475] env[68233]: DEBUG nova.compute.provider_tree [None 
req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.509623] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782178, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.523523] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782180, 'name': PowerOffVM_Task, 'duration_secs': 0.183359} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.523817] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.524189] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.524771] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b288966b-b882-49be-ad25-e0d1cdbe263c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.606987] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.645965] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.742800] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updated VIF entry in instance network info cache for port bc63fa79-ccc0-4ad9-b4df-185add5228eb. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 762.742800] env[68233]: DEBUG nova.network.neutron [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updating instance_info_cache with network_info: [{"id": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "address": "fa:16:3e:21:98:88", "network": {"id": "df9005ef-abd2-4fe3-98ae-8765a54c74ef", "bridge": "br-int", "label": "tempest-ServersTestJSON-409220955-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.226", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "489301846eb44533a5fbab92d9da4bf6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "77aa121f-8fb6-42f3-aaea-43addfe449b2", "external-id": "nsx-vlan-transportzone-288", "segmentation_id": 288, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc63fa79-cc", "ovs_interfaceid": "bc63fa79-ccc0-4ad9-b4df-185add5228eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.850884] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782181, 'name': Rename_Task, 'duration_secs': 0.229886} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.851224] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.851522] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5f9246c-75f6-48cf-b968-9032d7cd06f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.857983] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 762.857983] env[68233]: value = "task-2782183" [ 762.857983] env[68233]: _type = "Task" [ 762.857983] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.865986] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782183, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.873787] env[68233]: DEBUG oslo_concurrency.lockutils [req-dae08684-e907-4989-80d4-ddb0bf8c7e1f req-97236dda-4d5e-4702-8722-14f0fcdfc37c service nova] Releasing lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.955994] env[68233]: DEBUG nova.scheduler.client.report [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.006387] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782178, 'name': CreateSnapshot_Task, 'duration_secs': 0.829363} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.006678] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 763.007478] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24380a3-5c0e-43b3-b29a-9caa2cb8b250 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.104202] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.126816] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.145190] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.157713] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.157980] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.158905] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.158905] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.158905] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.158905] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.158905] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.159304] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.159563] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.159773] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.161266] env[68233]: DEBUG nova.virt.hardware [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.162191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd28ac6-ff28-433f-838e-d346ab25ed73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.171073] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd4e485-3378-4bdd-890b-99c34da2434b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.244819] env[68233]: DEBUG oslo_concurrency.lockutils [req-cae422a7-e1cc-4316-8f18-1e0c3790a34e req-b5f66265-c8fe-49d1-be34-276d95b2d994 service nova] Releasing lock "refresh_cache-876d428d-d5c9-422a-aba2-2d6c61b092db" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.368216] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782183, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.463781] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.367s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.467196] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.101s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.467620] env[68233]: DEBUG nova.objects.instance [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lazy-loading 'resources' on Instance uuid a5468df9-c54d-4014-8002-ef82f111a7a4 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.493912] env[68233]: INFO nova.scheduler.client.report [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleted allocations for instance 35cbc15b-48d8-4acd-a957-eec3421df1ce [ 763.528972] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 763.529385] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-84fc73a8-381e-4bd6-8ecf-c9858410eff9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.538756] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 763.538756] env[68233]: value = "task-2782184" [ 763.538756] env[68233]: _type = "Task" [ 763.538756] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.549577] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782184, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.601542] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.644269] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.712523] env[68233]: DEBUG nova.compute.manager [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-vif-plugged-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 763.712783] env[68233]: DEBUG oslo_concurrency.lockutils [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.714644] env[68233]: DEBUG oslo_concurrency.lockutils [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.715031] env[68233]: DEBUG oslo_concurrency.lockutils [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.715130] env[68233]: DEBUG nova.compute.manager [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] No waiting events found dispatching network-vif-plugged-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 763.715317] env[68233]: WARNING nova.compute.manager [req-04379084-f005-4e28-a7dc-7eb0b421d24f req-2d79cf37-4738-4751-bf90-e081ffdc5cf0 service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received unexpected event network-vif-plugged-9041c031-c9af-4931-8450-0b57b0e71c17 for instance with vm_state building and task_state spawning. [ 763.787244] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Successfully updated port: 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.869776] env[68233]: DEBUG oslo_vmware.api [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782183, 'name': PowerOnVM_Task, 'duration_secs': 0.658137} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.870118] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 763.870269] env[68233]: INFO nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Took 8.46 seconds to spawn the instance on the hypervisor. [ 763.870473] env[68233]: DEBUG nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 763.871272] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018edf2c-abba-499e-a660-a2005c7d5f03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.001848] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea3377af-dd94-461e-9049-67414fd3e012 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "35cbc15b-48d8-4acd-a957-eec3421df1ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.088s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.053648] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782184, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.104313] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.145459] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.292358] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.292358] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.292358] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.389872] env[68233]: INFO nova.compute.manager [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Took 43.84 seconds to build instance. [ 764.486511] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db052ea2-b8b1-4efa-a993-111665a36514 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.494043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcb2b85-9548-4c80-a8ef-9c4e8e1b1af9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.531956] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6fbed8-5945-42f9-84a4-98dda6295b6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.540854] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4476773-5c53-4f87-b23d-41ec86703b50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.561931] env[68233]: DEBUG nova.compute.provider_tree [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.571625] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782184, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.602364] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task} progress is 25%. 
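[editor's note] The "Inventory has not changed for provider ..." entries compare the resource tracker's locally computed inventory with what the provider tree already holds; when the two mappings are equal, no update is pushed to Placement. The payload shape below copies the values from the log line above; the comparison is a simplified illustration, not the report client's code.

# Inventory as reported in the log for provider 51aa13e7-0977-4031-b209-4ae90c83752c.
local_inventory = {
    "VCPU":      {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                  "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1, "max_unit": 65530,
                  "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 174,
                  "step_size": 1, "allocation_ratio": 1.0},
}

placement_view = dict(local_inventory)  # stand-in for the cached provider-tree copy
if local_inventory == placement_view:
    print("Inventory has not changed; skip the Placement update")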
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.643990] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.683772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.684110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.684269] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Deleting the datastore file [datastore2] c8fd5539-8add-45fe-a0ac-8767bf8a330e {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.684587] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8aecb927-8426-487e-9e95-41048db8a006 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.691341] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for the task: (returnval){ [ 764.691341] env[68233]: value = "task-2782185" [ 764.691341] env[68233]: _type = "Task" [ 764.691341] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.699746] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.806225] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.806472] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.806662] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Deleting the datastore file [datastore1] 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.807912] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c237fa8-5fc9-45ae-a8f3-03f35bbd1a5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.810639] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 764.811242] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 764.811242] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Deleting the datastore file [datastore2] 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 764.811242] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67943bdc-9b8b-45a9-84bf-15852138780b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.817783] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for the task: (returnval){ [ 764.817783] env[68233]: value = "task-2782186" [ 764.817783] env[68233]: _type = "Task" [ 764.817783] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.819314] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for the task: (returnval){ [ 764.819314] env[68233]: value = "task-2782187" [ 764.819314] env[68233]: _type = "Task" [ 764.819314] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.831636] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.835357] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782187, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.840720] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.892168] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c838cbf8-4bcc-4740-81eb-68b5b1fb770c tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.954s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.003219] env[68233]: DEBUG nova.network.neutron [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": 
"9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.062984] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782184, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.067431] env[68233]: DEBUG nova.scheduler.client.report [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.105272] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782175, 'name': CreateVM_Task, 'duration_secs': 3.861425} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.105368] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 765.106125] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.106312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.106754] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.106973] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b5f3992-4eca-4dda-b1c9-9f9ed49a1a55 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.111751] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 765.111751] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbdd-038d-637d-4b7a-79c48e342f19" [ 765.111751] env[68233]: _type = "Task" [ 765.111751] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.120813] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbdd-038d-637d-4b7a-79c48e342f19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.144240] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782167, 'name': CloneVM_Task, 'duration_secs': 5.82878} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.144560] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Created linked-clone VM from snapshot [ 765.145389] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68492208-5ebe-42df-873b-997132d9e4c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.154339] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Uploading image 555dcb44-496d-4c1f-bfcd-213c3ef0145d {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 765.187469] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 765.187469] env[68233]: value = "vm-559350" [ 765.187469] env[68233]: _type = "VirtualMachine" [ 765.187469] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 765.187777] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fe347bef-71f3-4929-bf37-e31d8041794b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.196811] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease: (returnval){ [ 765.196811] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e0119f-30a3-b9d7-01be-a6466ef28441" [ 765.196811] env[68233]: _type = "HttpNfcLease" [ 765.196811] env[68233]: } obtained for exporting VM: (result){ [ 765.196811] env[68233]: value = "vm-559350" [ 765.196811] env[68233]: _type = "VirtualMachine" [ 765.196811] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 765.197203] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the lease: (returnval){ [ 765.197203] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e0119f-30a3-b9d7-01be-a6466ef28441" [ 765.197203] env[68233]: _type = "HttpNfcLease" [ 765.197203] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 765.203324] env[68233]: DEBUG oslo_vmware.api [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Task: {'id': task-2782185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202138} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.203876] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.204078] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.204264] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.204494] env[68233]: INFO nova.compute.manager [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Took 5.03 seconds to destroy the instance on the hypervisor. [ 765.204676] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.204866] env[68233]: DEBUG nova.compute.manager [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.204982] env[68233]: DEBUG nova.network.neutron [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.207743] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 765.207743] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e0119f-30a3-b9d7-01be-a6466ef28441" [ 765.207743] env[68233]: _type = "HttpNfcLease" [ 765.207743] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 765.208893] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 765.208893] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e0119f-30a3-b9d7-01be-a6466ef28441" [ 765.208893] env[68233]: _type = "HttpNfcLease" [ 765.208893] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 765.209074] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac5d66f9-c3f6-49c7-99e5-a73c53908830 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.216573] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 765.216761] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 765.313722] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-145a1e5d-9fc4-4976-9258-91f9a39a1774 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.335273] env[68233]: DEBUG oslo_vmware.api [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Task: {'id': task-2782186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170552} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.335408] env[68233]: DEBUG oslo_vmware.api [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Task: {'id': task-2782187, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17922} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.335581] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.335728] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.336771] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.336771] env[68233]: INFO nova.compute.manager [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Took 3.93 seconds to destroy the instance on the hypervisor. [ 765.336771] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.336771] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 765.336771] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 765.337153] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 765.337153] env[68233]: INFO nova.compute.manager [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Took 3.37 seconds to destroy the instance on the hypervisor. [ 765.337340] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.340206] env[68233]: DEBUG nova.compute.manager [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.340302] env[68233]: DEBUG nova.network.neutron [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.341967] env[68233]: DEBUG nova.compute.manager [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 765.341967] env[68233]: DEBUG nova.network.neutron [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 765.390524] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.390524] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.401850] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 765.511027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.511027] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Instance network_info: |[{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 765.511027] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:0d:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0dd3c126-9d86-4f9a-b81c-e9627c7a5401', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9041c031-c9af-4931-8450-0b57b0e71c17', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.519331] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.520202] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 765.521072] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8573f4ce-c202-403a-bb5e-c8aafb92a0fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.551045] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.551045] env[68233]: value = "task-2782189" [ 765.551045] env[68233]: _type = "Task" [ 765.551045] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.562503] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782184, 'name': CloneVM_Task, 'duration_secs': 1.833767} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.566861] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Created linked-clone VM from snapshot [ 765.567129] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782189, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.569088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a04440-2f51-4a21-b9e3-0549c3f6dce8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.578389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.111s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.584018] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Uploading image ce6698cf-910f-4968-8c20-766abed67783 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 765.586087] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.619s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.588141] env[68233]: INFO nova.compute.claims [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 
tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 765.595792] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 765.596562] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f1dbb961-ce9e-4bb6-9baf-c6ed1f1da96a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.604221] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 765.604221] env[68233]: value = "task-2782190" [ 765.604221] env[68233]: _type = "Task" [ 765.604221] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.617453] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782190, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.618635] env[68233]: INFO nova.scheduler.client.report [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Deleted allocations for instance a5468df9-c54d-4014-8002-ef82f111a7a4 [ 765.633920] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbdd-038d-637d-4b7a-79c48e342f19, 'name': SearchDatastore_Task, 'duration_secs': 0.011268} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.634298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.634589] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.634876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.635687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.635687] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.635687] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7149fb7a-000b-45e0-8b99-2127acc811e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.644633] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.644951] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.645716] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2f47e5f-0a60-4778-ac27-44b0f0e827cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.653770] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 765.653770] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952ec0-78d6-8598-db3c-f9d38ea52292" [ 765.653770] env[68233]: _type = "Task" [ 765.653770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.663047] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952ec0-78d6-8598-db3c-f9d38ea52292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.724446] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "03688e90-5433-47ca-baaa-75861ad093b7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.724819] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.735066] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 765.735310] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.924516] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
{{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.073042] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782189, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.095299] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 766.095299] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing instance network info cache due to event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 766.095666] env[68233]: DEBUG oslo_concurrency.lockutils [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.095963] env[68233]: DEBUG oslo_concurrency.lockutils [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.096286] env[68233]: DEBUG nova.network.neutron [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.097869] env[68233]: DEBUG nova.network.neutron [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.109178] env[68233]: DEBUG nova.compute.manager [req-a48f4e3f-51dc-4ec0-8cac-95bf2f0be89e req-15231f57-ffcc-4268-a0f6-9afbfbe72c01 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Received event network-vif-deleted-5602cbb3-fef2-4353-917c-04002ea9ac31 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 766.109701] env[68233]: INFO nova.compute.manager [req-a48f4e3f-51dc-4ec0-8cac-95bf2f0be89e req-15231f57-ffcc-4268-a0f6-9afbfbe72c01 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Neutron deleted interface 5602cbb3-fef2-4353-917c-04002ea9ac31; detaching it from the instance and deleting it from the info cache [ 766.109903] env[68233]: DEBUG nova.network.neutron [req-a48f4e3f-51dc-4ec0-8cac-95bf2f0be89e req-15231f57-ffcc-4268-a0f6-9afbfbe72c01 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.124660] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 
tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782190, 'name': Destroy_Task, 'duration_secs': 0.508462} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.125031] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Destroyed the VM [ 766.125349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 766.125792] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a6abe734-92ae-4268-a92d-190f7e4611ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.135018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5c412a76-04d0-4868-9ffe-bae27861eb76 tempest-ServerPasswordTestJSON-1507656040 tempest-ServerPasswordTestJSON-1507656040-project-member] Lock "a5468df9-c54d-4014-8002-ef82f111a7a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.434s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.139988] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 766.139988] env[68233]: value = "task-2782191" [ 766.139988] env[68233]: _type = "Task" [ 766.139988] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.151833] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782191, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.163923] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952ec0-78d6-8598-db3c-f9d38ea52292, 'name': SearchDatastore_Task, 'duration_secs': 0.011824} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.164131] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-712e2d3b-504c-495f-9610-59eec2c64d47 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.170058] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 766.170058] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52db0c87-3f5a-f0af-b84a-fe7f939f0f2e" [ 766.170058] env[68233]: _type = "Task" [ 766.170058] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.178846] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52db0c87-3f5a-f0af-b84a-fe7f939f0f2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.179246] env[68233]: DEBUG nova.network.neutron [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.564374] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782189, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.575902] env[68233]: DEBUG nova.network.neutron [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.605400] env[68233]: INFO nova.compute.manager [-] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Took 1.40 seconds to deallocate network for instance. [ 766.618584] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c48fd3e5-3873-4877-897d-5b7f518eed22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.629290] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f003954-bb31-4bcc-b458-896f79407f41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.684112] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782191, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.684767] env[68233]: INFO nova.compute.manager [-] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Took 1.34 seconds to deallocate network for instance. 
[ 766.685358] env[68233]: DEBUG nova.compute.manager [req-a48f4e3f-51dc-4ec0-8cac-95bf2f0be89e req-15231f57-ffcc-4268-a0f6-9afbfbe72c01 service nova] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Detach interface failed, port_id=5602cbb3-fef2-4353-917c-04002ea9ac31, reason: Instance 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 766.711604] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52db0c87-3f5a-f0af-b84a-fe7f939f0f2e, 'name': SearchDatastore_Task, 'duration_secs': 0.01598} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.714616] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.714941] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] dd59cab5-3f9a-42cc-93f1-75cea940acdd/dd59cab5-3f9a-42cc-93f1-75cea940acdd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.716645] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6f58183-962d-4341-80b4-3f0595587b5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.723092] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 766.723092] env[68233]: value = "task-2782192" [ 766.723092] env[68233]: _type = "Task" [ 766.723092] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.739949] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.937400] env[68233]: DEBUG nova.network.neutron [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updated VIF entry in instance network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.937796] env[68233]: DEBUG nova.network.neutron [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.069899] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782189, 'name': CreateVM_Task, 'duration_secs': 1.435473} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.070123] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.071038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.071170] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.071568] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.071871] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae0c2dd5-8e04-42af-985e-83ebba955f2d {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.079454] env[68233]: INFO nova.compute.manager [-] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Took 1.74 seconds to deallocate network for instance. [ 767.079454] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 767.079454] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522c0711-9305-58c1-4e01-d036fe04b287" [ 767.079454] env[68233]: _type = "Task" [ 767.079454] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.097551] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522c0711-9305-58c1-4e01-d036fe04b287, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.120626] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.155667] env[68233]: DEBUG oslo_vmware.api [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782191, 'name': RemoveSnapshot_Task, 'duration_secs': 0.82932} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.158987] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 767.204202] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.232837] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782192, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.312375] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f917985-fb59-441f-9fd6-4ece150c931c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.320890] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b032b01d-b483-47a5-ab0d-f32a6e5888f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.360619] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8ede07-1b11-44e0-bc5f-d2d2e6667e23 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.369110] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a32ef0-d1f3-4dae-afa8-76d9330b80d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.384940] env[68233]: DEBUG nova.compute.provider_tree [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.442735] env[68233]: DEBUG oslo_concurrency.lockutils [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.443062] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Received event network-vif-deleted-ade59518-8bb1-4241-9622-856e6284b19f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 767.443252] env[68233]: INFO nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Neutron deleted interface ade59518-8bb1-4241-9622-856e6284b19f; detaching it from the instance and deleting it from the info cache [ 767.443428] env[68233]: DEBUG nova.network.neutron [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.590899] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.595434] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': 
session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522c0711-9305-58c1-4e01-d036fe04b287, 'name': SearchDatastore_Task, 'duration_secs': 0.055294} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.595752] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.595952] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 767.597026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.597026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.597026] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 767.597026] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca6eabeb-208b-441b-a989-64204482114b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.607150] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 767.607150] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 767.607150] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1389572-b6ed-4d6c-b965-3dceeb5fa2a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.613770] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 767.613770] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52419ac8-4766-1c3f-c8b5-872509056d33" [ 767.613770] env[68233]: _type = "Task" [ 767.613770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.622634] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52419ac8-4766-1c3f-c8b5-872509056d33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.665031] env[68233]: WARNING nova.compute.manager [None req-a9598406-84a6-4b50-9c88-dd2512301f50 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Image not found during snapshot: nova.exception.ImageNotFound: Image ce6698cf-910f-4968-8c20-766abed67783 could not be found. [ 767.734878] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782192, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.889309] env[68233]: DEBUG nova.scheduler.client.report [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.950721] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0668a80e-2e93-4aa7-affa-298128b7c811 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.961070] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af073373-a33c-4e5e-8c2b-6fff0c6ead90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.009308] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Detach interface failed, port_id=ade59518-8bb1-4241-9622-856e6284b19f, reason: Instance c8fd5539-8add-45fe-a0ac-8767bf8a330e could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 768.009587] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Received event network-vif-deleted-95df51ae-391f-43ee-976f-70e2a4bb769a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 768.009769] env[68233]: INFO nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Neutron deleted interface 95df51ae-391f-43ee-976f-70e2a4bb769a; detaching it from the instance and deleting it from the info cache [ 768.009982] env[68233]: DEBUG nova.network.neutron [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.074209] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.074542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" acquired by 
"nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.074885] env[68233]: INFO nova.compute.manager [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Rebooting instance [ 768.124923] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52419ac8-4766-1c3f-c8b5-872509056d33, 'name': SearchDatastore_Task, 'duration_secs': 0.012769} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.125768] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65792eba-0ff2-4426-8307-f7ef7c358f0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.132026] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 768.132026] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273453d-dcd7-c629-3b89-db19b0324efb" [ 768.132026] env[68233]: _type = "Task" [ 768.132026] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.139587] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273453d-dcd7-c629-3b89-db19b0324efb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.233809] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782192, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.357773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.357773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.357773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 768.357773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.357773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.359692] env[68233]: INFO nova.compute.manager [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Terminating instance [ 768.400028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.811s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.400028] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Start building networks asynchronously for instance. 
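The Acquiring/acquired/"released" triplets above all point at the same `inner` wrapper in oslo_concurrency/lockutils.py; that is the pattern produced by oslo.concurrency's `synchronized` decorator (or the equivalent `lock()` context manager) guarding a named critical section, with the waited/held durations reported on acquire and release. A minimal sketch of that usage, with the lock name taken from the log and the function body invented for illustration:

    # Minimal illustration of the locking pattern behind the "Acquiring lock
    # ...", "Lock ... acquired by ..." and 'Lock ... "released" by ...' lines.
    # The lock name is copied from the log; the function body is made up.
    import logging

    from oslo_concurrency import lockutils

    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('dcd8cca2-b62c-44a6-9e77-f336d2d39c09-events')
    def _clear_events():
        # Runs with the named internal semaphore held; concurrent callers
        # serialize here, which is what the waited/held durations measure.
        return []

    _clear_events()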
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 768.401806] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.688s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 768.405051] env[68233]: INFO nova.compute.claims [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.514297] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60a2e633-baf7-42eb-bbc1-6d9476856e41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.523688] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb33bb48-87e4-47f6-bd86-3b0330728e1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.564193] env[68233]: DEBUG nova.compute.manager [req-a9964fac-4408-4d9d-ba4c-758bca630205 req-b99e961d-d9b2-497c-a4e0-9c2513a7830d service nova] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Detach interface failed, port_id=95df51ae-391f-43ee-976f-70e2a4bb769a, reason: Instance 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 768.602239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.602239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.602239] env[68233]: DEBUG nova.network.neutron [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 769.369258] env[68233]: DEBUG nova.compute.manager [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 769.369640] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 769.370620] env[68233]: DEBUG nova.compute.utils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 769.379686] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ba71ab-57e7-4ee5-9979-d0bae2f4880f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.382580] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 769.382786] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 769.397896] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782192, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.67784} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.398176] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 769.398451] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273453d-dcd7-c629-3b89-db19b0324efb, 'name': SearchDatastore_Task, 'duration_secs': 0.017695} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.398834] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] dd59cab5-3f9a-42cc-93f1-75cea940acdd/dd59cab5-3f9a-42cc-93f1-75cea940acdd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 769.398931] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 769.399278] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4b514c8-90a4-46db-8e3a-1b9109165051 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.400661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.400908] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/13972b73-8bae-4a2a-a987-b6177381e7c8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.402672] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6011481d-9aa4-476b-8482-f94346802079 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.404536] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-602f656f-54d6-4b92-b76b-1cb25aa4cb7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.411909] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 769.411909] env[68233]: value = "task-2782193" [ 769.411909] env[68233]: _type = "Task" [ 769.411909] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.413851] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 769.413851] env[68233]: value = "task-2782194" [ 769.413851] env[68233]: _type = "Task" [ 769.413851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.414082] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 769.414082] env[68233]: value = "task-2782195" [ 769.414082] env[68233]: _type = "Task" [ 769.414082] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.428274] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782193, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.436294] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.436493] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782194, 'name': ExtendVirtualDisk_Task} progress is 0%. 
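The recurring "Invoking <object>.<Something>_Task", "Waiting for the task ... to complete" and "progress is N%" entries are oslo.vmware's task-handling pattern: a vSphere method that returns a Task managed object is called through the API session, and the session then polls the task until it reaches a terminal state. A hedged sketch of that call pattern, with host, credentials and the VM reference as placeholders rather than values from this deployment:

    # Hedged sketch of the oslo.vmware task pattern: invoke a vSphere method
    # that returns a Task moref, then block on it until it finishes.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'password',   # placeholders
        api_retry_count=10,
        task_poll_interval=0.5)                  # poll cadence behind "progress is N%"

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # made-up moref

    # PowerOffVM_Task returns a Task reference; wait_for_task() polls it until
    # it succeeds (or raises on error), producing the progress log lines.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)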
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.447437] env[68233]: DEBUG nova.policy [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '673cdb014f4949baa648cb8d661293eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d6e2fbf0f9c4fb0bf99e71506798d7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 769.797912] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Successfully created port: 1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.811874] env[68233]: DEBUG nova.network.neutron [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.877907] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 769.935452] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782193, 'name': PowerOffVM_Task, 'duration_secs': 0.223079} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.935694] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 769.935929] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 769.936212] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fb0c70b-e58d-4e3b-9d36-f464998a4040 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.945357] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782195, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.945722] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070431} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.949451] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 769.950668] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff4bd872-5924-425f-b588-16f9831ea2e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.981917] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] dd59cab5-3f9a-42cc-93f1-75cea940acdd/dd59cab5-3f9a-42cc-93f1-75cea940acdd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 769.985526] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6f5405a-3ec5-48f3-94a9-d9825b1daa1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.012162] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 770.012162] env[68233]: value = "task-2782197" [ 770.012162] env[68233]: _type = "Task" [ 770.012162] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.027188] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782197, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.033438] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 770.033783] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 770.034130] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleting the datastore file [datastore2] dcd8cca2-b62c-44a6-9e77-f336d2d39c09 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 770.037728] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ec14071-a7cb-4760-a30d-5b2e6617570c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.048776] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 770.048776] env[68233]: value = "task-2782198" [ 770.048776] env[68233]: _type = "Task" [ 770.048776] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.064389] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.316623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.433100] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659037} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.434194] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/13972b73-8bae-4a2a-a987-b6177381e7c8.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.434496] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.434811] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f358ba0-4267-46d0-b9ba-55e20e7421c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.445497] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 770.445497] env[68233]: value = "task-2782199" [ 770.445497] env[68233]: _type = "Task" [ 770.445497] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.455837] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.523508] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782197, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.549902] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767a2534-b51d-4b5a-92ea-be86b4278b3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.572087] env[68233]: DEBUG oslo_vmware.api [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.31449} completed successfully. 
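Paths such as "[datastore1] 13972b73-.../13972b73-....vmdk" in the copy and extend entries above use vSphere's datastore-path notation, "[<datastore>] <folder>/<file>"; the extend target of 1048576 is the requested root disk size in KiB, i.e. a 1 GiB root disk. Two tiny, purely illustrative helpers for composing and splitting that notation (not Nova's datastore utilities):

    # Tiny, hypothetical helpers for the "[datastore] folder/file" notation;
    # Nova builds these paths with its own datastore utilities.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    def split_ds_path(path):
        datastore, rest = path.split('] ', 1)
        return datastore.lstrip('['), rest

    uuid = '13972b73-8bae-4a2a-a987-b6177381e7c8'
    print(ds_path('datastore1', uuid, uuid + '.vmdk'))
    # [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/13972b73-8bae-4a2a-a987-b6177381e7c8.vmdk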
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.575429] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 770.575429] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 770.575706] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 770.575780] env[68233]: INFO nova.compute.manager [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Took 1.21 seconds to destroy the instance on the hypervisor. [ 770.576062] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.576720] env[68233]: DEBUG nova.compute.manager [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 770.576822] env[68233]: DEBUG nova.network.neutron [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 770.579315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad345420-2965-4661-8172-b169a2632ebd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.613458] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e7fb67-c5ef-4dfb-bf93-88a25d4b56ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.628018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6602292-7a7a-45a5-8eba-98d45ae997f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.649635] env[68233]: DEBUG nova.compute.provider_tree [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.825018] env[68233]: DEBUG nova.compute.manager [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 770.825018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a823479e-1964-4cbc-8fe8-d5ec11bfbcfd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.894443] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 770.920655] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:51:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='508f22af-e037-4878-8980-ab644bbabaa4',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1432814527',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 770.921270] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.921567] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 770.921856] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.922114] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 770.925020] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 770.925020] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 770.925020] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 770.925020] env[68233]: DEBUG 
nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 770.925020] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 770.925020] env[68233]: DEBUG nova.virt.hardware [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 770.925020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a26323-3ea5-4380-b963-ac7444b37dcd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.935263] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bb843a-91bc-41f1-9459-f8563ec81d1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.964050] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135133} completed successfully. 
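The nova.virt.hardware entries above walk from the flavor and image limits (all unset, so the 65536 maxima apply) down to a single candidate topology for one vCPU. As an illustration of that enumeration, not Nova's implementation: every sockets*cores*threads factorization of the vCPU count that stays within the per-dimension maxima is a possible topology, and for vcpus=1 only 1:1:1 qualifies.

    # Illustration (not Nova's code) of the topology enumeration described in
    # the hardware entries: every sockets*cores*threads factorization of the
    # vCPU count within the per-dimension maxima is a candidate topology.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield VirtCPUTopology(sockets, cores, threads)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]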
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.965097] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 770.965285] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61306e29-52ef-4e08-80be-c73903935131 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.990417] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/13972b73-8bae-4a2a-a987-b6177381e7c8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 770.990807] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76b8c1f4-21f7-4cf7-bff6-0825d083df10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.015204] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 771.015204] env[68233]: value = "task-2782200" [ 771.015204] env[68233]: _type = "Task" [ 771.015204] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.029713] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782197, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.033603] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782200, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.060900] env[68233]: DEBUG nova.compute.manager [req-4b49f87a-091f-4493-b999-fe35442254fc req-7add9d3f-c867-45d2-be51-0d7308d21243 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Received event network-vif-deleted-ab957e11-50f4-459b-92be-c9bd72946850 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 771.060981] env[68233]: INFO nova.compute.manager [req-4b49f87a-091f-4493-b999-fe35442254fc req-7add9d3f-c867-45d2-be51-0d7308d21243 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Neutron deleted interface ab957e11-50f4-459b-92be-c9bd72946850; detaching it from the instance and deleting it from the info cache [ 771.061412] env[68233]: DEBUG nova.network.neutron [req-4b49f87a-091f-4493-b999-fe35442254fc req-7add9d3f-c867-45d2-be51-0d7308d21243 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.152162] env[68233]: DEBUG nova.scheduler.client.report [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.369134] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Successfully updated port: 1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 771.495912] env[68233]: DEBUG nova.compute.manager [req-557a9a14-c6ec-440e-922c-650df68024c2 req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Received event network-vif-plugged-1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 771.496270] env[68233]: DEBUG oslo_concurrency.lockutils [req-557a9a14-c6ec-440e-922c-650df68024c2 req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] Acquiring lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.496551] env[68233]: DEBUG oslo_concurrency.lockutils [req-557a9a14-c6ec-440e-922c-650df68024c2 req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.496551] env[68233]: DEBUG oslo_concurrency.lockutils [req-557a9a14-c6ec-440e-922c-650df68024c2 
req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.496682] env[68233]: DEBUG nova.compute.manager [req-557a9a14-c6ec-440e-922c-650df68024c2 req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] No waiting events found dispatching network-vif-plugged-1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 771.496837] env[68233]: WARNING nova.compute.manager [req-557a9a14-c6ec-440e-922c-650df68024c2 req-76d18e3c-4703-4fe9-a02e-bfa13f0468f4 service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Received unexpected event network-vif-plugged-1c30459d-e88b-42bd-8073-04aa89cecbc3 for instance with vm_state building and task_state spawning. [ 771.508775] env[68233]: DEBUG nova.network.neutron [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.533915] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782197, 'name': ReconfigVM_Task, 'duration_secs': 1.428903} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.534289] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782200, 'name': ReconfigVM_Task, 'duration_secs': 0.495803} completed successfully. 
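The "No waiting events found dispatching network-vif-plugged-..." warning above reflects how nova-compute handles Neutron-driven external events: each event is matched against waiters registered per instance and event key, and an event with no registered waiter is simply logged as unexpected (here the instance is still building, so nothing was waiting on the plug yet). A purely conceptual sketch of that bookkeeping, not Nova's implementation (the class name is borrowed from the log for readability):

    # Conceptual sketch (not Nova's implementation) of the event bookkeeping:
    # external events are matched against waiters registered per instance and
    # event key; with no waiter present the event is treated as unexpected.
    import threading

    class InstanceEvents(object):
        def __init__(self):
            self._waiters = {}          # (instance_uuid, event_key) -> Event
            self._lock = threading.Lock()

        def prepare(self, instance_uuid, event_key):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_key)] = ev
            return ev

        def pop(self, instance_uuid, event_key):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_key), None)

    events = InstanceEvents()
    waiter = events.pop('72467d49-6fa8-42db-871e-4e50e77eedf7',
                        'network-vif-plugged-1c30459d-e88b-42bd-8073-04aa89cecbc3')
    if waiter is None:
        print('No waiting events found; event is unexpected for this instance.')
    else:
        waiter.set()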
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.535155] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Reconfigured VM instance instance-0000002e to attach disk [datastore1] dd59cab5-3f9a-42cc-93f1-75cea940acdd/dd59cab5-3f9a-42cc-93f1-75cea940acdd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.535426] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/13972b73-8bae-4a2a-a987-b6177381e7c8.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 771.536085] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9524d591-0209-4db5-9640-4818509e7fc9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.538164] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fbafb2a7-3980-4537-8cee-742554522aa9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.547460] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 771.547460] env[68233]: value = "task-2782202" [ 771.547460] env[68233]: _type = "Task" [ 771.547460] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.549498] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 771.549498] env[68233]: value = "task-2782201" [ 771.549498] env[68233]: _type = "Task" [ 771.549498] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.563418] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dae5599e-86cd-454e-8081-7e7eaaf92a09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.570395] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782202, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.573973] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782201, 'name': Rename_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.582690] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5644060-7648-42cc-ba3a-67820db18180 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.620813] env[68233]: DEBUG nova.compute.manager [req-4b49f87a-091f-4493-b999-fe35442254fc req-7add9d3f-c867-45d2-be51-0d7308d21243 service nova] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Detach interface failed, port_id=ab957e11-50f4-459b-92be-c9bd72946850, reason: Instance dcd8cca2-b62c-44a6-9e77-f336d2d39c09 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 771.658105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.256s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 771.658983] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 771.662030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.229s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 771.663577] env[68233]: INFO nova.compute.claims [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 771.842184] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aeee4e6-c7a9-4f21-a30e-572e1d6b8ea2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.850184] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Doing hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 771.850534] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-373e015c-3d6c-4756-9fe5-cf60e312aee3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.859132] env[68233]: DEBUG oslo_vmware.api [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 771.859132] env[68233]: value = "task-2782203" [ 771.859132] 
env[68233]: _type = "Task" [ 771.859132] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.868215] env[68233]: DEBUG oslo_vmware.api [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782203, 'name': ResetVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.873405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.873563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.873766] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 772.012303] env[68233]: INFO nova.compute.manager [-] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Took 1.44 seconds to deallocate network for instance. [ 772.066246] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782202, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.069375] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782201, 'name': Rename_Task, 'duration_secs': 0.218056} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.069671] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 772.069920] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc239319-e7bb-4bd4-8ee6-365b1cec0759 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.079270] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 772.079270] env[68233]: value = "task-2782204" [ 772.079270] env[68233]: _type = "Task" [ 772.079270] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.090117] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.168862] env[68233]: DEBUG nova.compute.utils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 772.170317] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 772.170688] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 772.217427] env[68233]: DEBUG nova.policy [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f038423be614371a095101a860e15c8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca8b913b2dc64be09fd2419ef97f0694', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 772.370414] env[68233]: DEBUG oslo_vmware.api [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782203, 'name': ResetVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.418888] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 772.496145] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Successfully created port: 67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.519619] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.569038] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782202, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.588913] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782204, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.669813] env[68233]: DEBUG nova.network.neutron [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.677484] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 772.875816] env[68233]: DEBUG oslo_vmware.api [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782203, 'name': ResetVM_Task, 'duration_secs': 0.995657} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.876122] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Did hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 772.876312] env[68233]: DEBUG nova.compute.manager [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 772.877112] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098434ea-d823-4f37-8a9b-235aca272bb7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.062723] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782202, 'name': Rename_Task, 'duration_secs': 1.24098} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.065467] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 773.065905] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-426a1088-9d1b-42c3-a905-d656a0e30692 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.073639] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 773.073639] env[68233]: value = "task-2782205" [ 773.073639] env[68233]: _type = "Task" [ 773.073639] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.085971] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.092021] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782204, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.173687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.174193] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Instance network_info: |[{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 773.176887] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:02:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1c30459d-e88b-42bd-8073-04aa89cecbc3', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 773.184692] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 773.188841] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 773.188841] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-24fe0bce-9737-45c3-b3ee-7503fd1f39ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.210810] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 773.212675] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1236a60-5fac-4ce8-a94c-f35980b3d86b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.222580] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 773.222771] env[68233]: ERROR oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk due to incomplete transfer. [ 773.224172] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-476a88e1-c4f7-431f-8dd3-bd3347c852b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.225819] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 773.225819] env[68233]: value = "task-2782206" [ 773.225819] env[68233]: _type = "Task" [ 773.225819] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.236137] env[68233]: DEBUG oslo_vmware.rw_handles [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/523614f1-3459-177a-b660-9a78af26ccc1/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 773.236357] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Uploaded image 555dcb44-496d-4c1f-bfcd-213c3ef0145d to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 773.238863] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 773.242774] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e5d20376-4e79-4df3-b2a5-afd4787697c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.244513] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782206, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.255052] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 773.255052] env[68233]: value = "task-2782207" [ 773.255052] env[68233]: _type = "Task" [ 773.255052] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.268932] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782207, 'name': Destroy_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.332450] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b3668c-1c90-4b2f-b90e-dded671be590 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.341492] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a5d547-c4fd-44dd-b3c6-a0917e609ec4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.373910] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20879490-74d9-41cc-b04b-e71996dba841 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.382556] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd351f7c-28fe-4cbd-9bb5-a6ab286dbef6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.400288] env[68233]: DEBUG nova.compute.provider_tree [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.401828] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb84cfd7-8711-4e5a-8748-806a9a267f3c tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 5.327s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.525508] env[68233]: DEBUG nova.compute.manager [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Received event network-changed-1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 773.525804] env[68233]: DEBUG nova.compute.manager [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Refreshing instance network info cache due to event network-changed-1c30459d-e88b-42bd-8073-04aa89cecbc3. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 773.525936] env[68233]: DEBUG oslo_concurrency.lockutils [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.526257] env[68233]: DEBUG oslo_concurrency.lockutils [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.526257] env[68233]: DEBUG nova.network.neutron [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Refreshing network info cache for port 1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.588110] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782205, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.595018] env[68233]: DEBUG oslo_vmware.api [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782204, 'name': PowerOnVM_Task, 'duration_secs': 1.463893} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.595412] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.595608] env[68233]: INFO nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Took 10.47 seconds to spawn the instance on the hypervisor. [ 773.595788] env[68233]: DEBUG nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.596623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9498160d-d8d0-43e6-8847-168c2127072e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.707783] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 773.730521] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 773.730803] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.731014] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 773.731251] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.731414] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 773.731568] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 773.731832] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 773.732030] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 773.732229] 
env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 773.732400] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 773.732579] env[68233]: DEBUG nova.virt.hardware [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 773.734386] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd3496c-44fa-4d35-ac7a-089afde7cf1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.748996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6287cff1-6e26-45d0-8688-b6fbfb53c4f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.755924] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782206, 'name': CreateVM_Task, 'duration_secs': 0.440702} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.755924] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 773.755924] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.755924] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.755924] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 773.755924] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14797fa8-d656-4a97-9333-9f52aa47a054 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.778105] env[68233]: DEBUG 
oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 773.778105] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521063f9-414c-ff6a-8397-2272b82b85ae" [ 773.778105] env[68233]: _type = "Task" [ 773.778105] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.781347] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782207, 'name': Destroy_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.790599] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521063f9-414c-ff6a-8397-2272b82b85ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.832913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.832913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.904042] env[68233]: DEBUG nova.scheduler.client.report [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 774.012551] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Successfully updated port: 67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 774.084367] env[68233]: DEBUG oslo_vmware.api [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': 
task-2782205, 'name': PowerOnVM_Task, 'duration_secs': 0.904353} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.084551] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 774.084750] env[68233]: INFO nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Took 15.85 seconds to spawn the instance on the hypervisor. [ 774.084924] env[68233]: DEBUG nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 774.085765] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce6d5d50-f1d5-47ed-8ba8-ca55cb92e878 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.115757] env[68233]: INFO nova.compute.manager [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Took 48.86 seconds to build instance. [ 774.237279] env[68233]: DEBUG nova.network.neutron [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updated VIF entry in instance network info cache for port 1c30459d-e88b-42bd-8073-04aa89cecbc3. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.237279] env[68233]: DEBUG nova.network.neutron [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.280325] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782207, 'name': Destroy_Task, 'duration_secs': 0.716291} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.280630] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Destroyed the VM [ 774.280873] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 774.281688] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-531f4fb0-82b5-4b4e-bc73-b0ed7e6a3769 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.293165] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 774.293165] env[68233]: value = "task-2782208" [ 774.293165] env[68233]: _type = "Task" [ 774.293165] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.297201] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521063f9-414c-ff6a-8397-2272b82b85ae, 'name': SearchDatastore_Task, 'duration_secs': 0.028071} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.300135] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.300387] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 774.300654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.300801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.300984] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 774.301263] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-182554c7-c69a-4788-bc0c-d5629c54a90f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.308222] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782208, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.311082] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 774.311283] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 774.312097] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b0ab033-1478-45ff-9f2b-9013265721e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.317886] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 774.317886] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a7940-2920-f08d-6996-a94236c666d8" [ 774.317886] env[68233]: _type = "Task" [ 774.317886] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.326233] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a7940-2920-f08d-6996-a94236c666d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.336802] env[68233]: DEBUG nova.compute.utils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.408932] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.747s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.409491] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 774.412143] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.756s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.412362] env[68233]: DEBUG nova.objects.instance [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lazy-loading 'resources' on Instance uuid 75f58a50-7891-42df-8820-c997300a3159 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.515165] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.515340] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquired lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.515468] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 774.605929] env[68233]: INFO nova.compute.manager [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Took 53.48 seconds to build instance. [ 774.617931] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36956ea9-d917-4a7d-aaa9-43677e4c8b65 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.200s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.740148] env[68233]: DEBUG oslo_concurrency.lockutils [req-ed22bbe3-fb76-4adc-b630-15abccd5c2e8 req-5322b17d-72dd-4f3c-a728-13b6fa9a58fc service nova] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.808036] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782208, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.829261] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524a7940-2920-f08d-6996-a94236c666d8, 'name': SearchDatastore_Task, 'duration_secs': 0.015616} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.830144] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddbd628b-fdff-4e33-bde0-9b8dc7dc2e5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.835585] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 774.835585] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52062d10-90e7-f440-d0f5-53fdc3abd218" [ 774.835585] env[68233]: _type = "Task" [ 774.835585] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.840413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.843766] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52062d10-90e7-f440-d0f5-53fdc3abd218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.915697] env[68233]: DEBUG nova.compute.utils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 774.917208] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 774.917385] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 774.961229] env[68233]: DEBUG nova.policy [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '593e6531c1574bf1ac0e81c5693e24f8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e8ffd47b7024dbd9138d2d6963e1eb4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 775.076703] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 775.110610] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf00148f-3a79-485f-9989-7bbd74299c59 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.565s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.122164] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 775.179074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "2a88648c-f00d-4d7b-905d-e70c327e248a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.179339] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.179525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.179702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.179870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.182259] env[68233]: INFO nova.compute.manager [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Terminating instance [ 775.260101] env[68233]: DEBUG nova.network.neutron [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updating instance_info_cache with network_info: [{"id": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "address": "fa:16:3e:d5:df:55", "network": {"id": "cd33f971-61f7-45a7-a4be-c2314a8db06c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2045724500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca8b913b2dc64be09fd2419ef97f0694", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e903f4-41", "ovs_interfaceid": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.261746] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Successfully created port: f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 775.308476] env[68233]: DEBUG oslo_vmware.api [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782208, 'name': RemoveSnapshot_Task, 'duration_secs': 0.720707} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.308796] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 775.309041] env[68233]: INFO nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Took 18.78 seconds to snapshot the instance on the hypervisor. [ 775.349588] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52062d10-90e7-f440-d0f5-53fdc3abd218, 'name': SearchDatastore_Task, 'duration_secs': 0.011866} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.349785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.350051] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 775.351021] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5db7b9e-b5bd-4309-9aaf-c8817550233d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.360023] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 775.360023] env[68233]: value = "task-2782209" [ 775.360023] env[68233]: _type = "Task" [ 775.360023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.371154] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.424355] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 775.513021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fa88b6-6345-4438-9db6-ec22284b8f5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.518229] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8c4ea3-2aa5-4966-8b51-b19861e09365 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.550812] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1362b23b-b3ef-4078-83cb-82b870efed8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.574775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40834454-0713-4d4a-802f-44823fa4fa6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.590280] env[68233]: DEBUG nova.compute.provider_tree [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.614730] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 775.642316] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.689139] env[68233]: DEBUG nova.compute.manager [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 775.689139] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.689139] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ae97f5-33a5-4f00-9958-d3f1766e5638 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.698175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.698175] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-feec4e29-76aa-4252-92a4-0a25896ae971 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.764872] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Releasing lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.765976] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Instance network_info: |[{"id": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "address": "fa:16:3e:d5:df:55", "network": {"id": "cd33f971-61f7-45a7-a4be-c2314a8db06c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2045724500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca8b913b2dc64be09fd2419ef97f0694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e903f4-41", "ovs_interfaceid": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 775.767872] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d5:df:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e350f83a-f581-4e10-ac16-0b0f7bfd3d38', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67e903f4-4173-44e7-a2c8-1d949ad0bd0d', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.777498] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Creating folder: Project (ca8b913b2dc64be09fd2419ef97f0694). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.780164] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51410605-6b32-4883-be8c-763cae04d185 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.782526] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.784025] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.784025] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore2] 2a88648c-f00d-4d7b-905d-e70c327e248a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.784025] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a34560e2-7cbd-45a7-a958-8dca0afecdec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.794023] env[68233]: DEBUG oslo_vmware.api [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 775.794023] env[68233]: value = "task-2782212" [ 775.794023] env[68233]: _type = "Task" [ 775.794023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.799345] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Created folder: Project (ca8b913b2dc64be09fd2419ef97f0694) in parent group-v559223. [ 775.800112] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Creating folder: Instances. Parent ref: group-v559358. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 775.800869] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab227856-587a-4384-bafc-2329845aa646 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.808029] env[68233]: DEBUG oslo_vmware.api [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.814152] env[68233]: DEBUG nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance disappeared during snapshot {{(pid=68233) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 775.818631] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Created folder: Instances in parent group-v559358. [ 775.818631] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.818631] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.819545] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-39e291e7-178b-4518-83b7-bbf7f3e75fb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.837643] env[68233]: DEBUG nova.compute.manager [None req-d0a23e51-ad28-46c6-b663-08151d832e9c tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image not found during clean up 555dcb44-496d-4c1f-bfcd-213c3ef0145d {{(pid=68233) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 775.847221] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.847221] env[68233]: value = "task-2782214" [ 775.847221] env[68233]: _type = "Task" [ 775.847221] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.858644] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782214, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.871204] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782209, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.925172] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.925172] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 775.925172] env[68233]: INFO nova.compute.manager [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Attaching volume f22d7265-641d-4b7f-a599-c9b2ea2a5768 to /dev/sdb [ 775.975917] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d4b6b2-d1e4-4990-a531-914f1381e47c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.986258] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d4d339-31f3-4958-927d-44112481dbc5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.009068] env[68233]: DEBUG nova.virt.block_device [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating existing volume attachment record: 6ec3c9a6-96be-4f6b-a1ea-8d80c59a4e94 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 776.096493] env[68233]: DEBUG nova.scheduler.client.report [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 776.140744] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.310156] env[68233]: DEBUG oslo_vmware.api [None 
req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.284875} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.310431] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.310653] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.310837] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.311015] env[68233]: INFO nova.compute.manager [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Took 0.62 seconds to destroy the instance on the hypervisor. [ 776.311271] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.311490] env[68233]: DEBUG nova.compute.manager [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 776.311615] env[68233]: DEBUG nova.network.neutron [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.356722] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782214, 'name': CreateVM_Task, 'duration_secs': 0.444258} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.356950] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.357690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.358122] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.358280] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.358570] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2f07683-bbc6-444b-8cc1-7a11f0ab1f14 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.371862] env[68233]: DEBUG nova.compute.manager [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Received event network-vif-plugged-67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 776.372036] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Acquiring lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.372334] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.372501] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.372577] env[68233]: DEBUG nova.compute.manager [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] 
[instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] No waiting events found dispatching network-vif-plugged-67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 776.372846] env[68233]: WARNING nova.compute.manager [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Received unexpected event network-vif-plugged-67e903f4-4173-44e7-a2c8-1d949ad0bd0d for instance with vm_state building and task_state spawning. [ 776.372846] env[68233]: DEBUG nova.compute.manager [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Received event network-changed-67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 776.373041] env[68233]: DEBUG nova.compute.manager [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Refreshing instance network info cache due to event network-changed-67e903f4-4173-44e7-a2c8-1d949ad0bd0d. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 776.373191] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Acquiring lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.373327] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Acquired lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.373484] env[68233]: DEBUG nova.network.neutron [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Refreshing network info cache for port 67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.375797] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 776.375797] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527a206f-3b1e-98dd-6e30-578c9c5920b8" [ 776.375797] env[68233]: _type = "Task" [ 776.375797] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.385299] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782209, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.897684} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.386691] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 776.386982] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 776.387672] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0b3ff41-ae8a-4663-ac35-9ad225bc2b10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.394546] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527a206f-3b1e-98dd-6e30-578c9c5920b8, 'name': SearchDatastore_Task, 'duration_secs': 0.012128} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.395272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.395537] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.395937] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.395937] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.396184] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.396500] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b376af2-d840-45d8-a229-5b82099f3eb7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.401865] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 776.401865] env[68233]: value = "task-2782218" [ 776.401865] env[68233]: _type = "Task" [ 776.401865] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.408446] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.408446] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.409467] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b1bd0e5-e354-48fb-8ac5-8ac7ec55bed4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.415148] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782218, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.420327] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 776.420327] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a4065-1382-63c3-2a2d-f420372541de" [ 776.420327] env[68233]: _type = "Task" [ 776.420327] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.429653] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a4065-1382-63c3-2a2d-f420372541de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.439176] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 776.476604] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 776.476838] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 776.476990] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 776.477257] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 776.477407] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 776.477560] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 776.477769] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 776.478449] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 776.478449] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 776.478449] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 776.478449] env[68233]: DEBUG nova.virt.hardware [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 776.479663] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c9dd438-02bd-488a-bf6a-4f29ce0807e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.492416] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42226822-3686-450c-b838-b6dbbaf230a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.602242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.606022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.787s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.607845] env[68233]: INFO nova.compute.claims [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 776.617407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "c5b42243-878f-4150-a5d3-63d69e474bd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.617407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.639973] env[68233]: INFO nova.scheduler.client.report [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Deleted allocations for instance 75f58a50-7891-42df-8820-c997300a3159 [ 776.790881] env[68233]: DEBUG nova.compute.manager [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Received event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 776.791016] env[68233]: DEBUG nova.compute.manager [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing instance network info cache due to event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 776.791251] env[68233]: DEBUG oslo_concurrency.lockutils [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] Acquiring lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.791374] env[68233]: DEBUG oslo_concurrency.lockutils [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] Acquired lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.791555] env[68233]: DEBUG nova.network.neutron [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.918112] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782218, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074145} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.918390] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 776.919221] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce098a36-293b-400e-9954-a40d67ccf542 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.950823] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 776.957260] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cde8b2d-797a-4c9a-b78f-1de934b96229 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.972220] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a4065-1382-63c3-2a2d-f420372541de, 'name': SearchDatastore_Task, 'duration_secs': 0.010721} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.973829] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5523933a-7225-4bdf-aff6-77236cda6377 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.979850] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 776.979850] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52229b34-e667-2926-766d-62412a8936ae" [ 776.979850] env[68233]: _type = "Task" [ 776.979850] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.982227] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 776.982227] env[68233]: value = "task-2782219" [ 776.982227] env[68233]: _type = "Task" [ 776.982227] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.994581] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782219, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.997676] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52229b34-e667-2926-766d-62412a8936ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.113187] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Successfully updated port: f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 777.151127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3f0a63-eb60-4851-b96c-3115e9376131 tempest-AttachInterfacesUnderV243Test-1967494767 tempest-AttachInterfacesUnderV243Test-1967494767-project-member] Lock "75f58a50-7891-42df-8820-c997300a3159" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.171s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.226192] env[68233]: DEBUG nova.network.neutron [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.229015] env[68233]: DEBUG nova.network.neutron [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updated VIF entry in instance network info cache for port 67e903f4-4173-44e7-a2c8-1d949ad0bd0d. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.234294] env[68233]: DEBUG nova.network.neutron [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updating instance_info_cache with network_info: [{"id": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "address": "fa:16:3e:d5:df:55", "network": {"id": "cd33f971-61f7-45a7-a4be-c2314a8db06c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2045724500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca8b913b2dc64be09fd2419ef97f0694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e903f4-41", "ovs_interfaceid": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.499770] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52229b34-e667-2926-766d-62412a8936ae, 'name': SearchDatastore_Task, 'duration_secs': 0.038996} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.504788] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.505072] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 990e1a66-f2ab-4925-b1da-58cdc41a6315/990e1a66-f2ab-4925-b1da-58cdc41a6315.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.505369] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782219, 'name': ReconfigVM_Task, 'duration_secs': 0.306895} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.505906] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-480e5652-23a9-486f-a397-ab55fd6a1d24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.511377] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 777.511377] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1b74e14-b731-4642-ae2b-b6d94037fc5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.518183] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 777.518183] env[68233]: value = "task-2782220" [ 777.518183] env[68233]: _type = "Task" [ 777.518183] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.518183] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 777.518183] env[68233]: value = "task-2782221" [ 777.518183] env[68233]: _type = "Task" [ 777.518183] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.533275] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782220, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.539756] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782221, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.611758] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.611912] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 777.612066] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 777.645865] env[68233]: DEBUG nova.network.neutron [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updated VIF entry in instance network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.646233] env[68233]: DEBUG nova.network.neutron [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [{"id": "f8c8623e-abba-4da9-8ab2-20413bb09889", "address": "fa:16:3e:36:c6:63", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8c8623e-ab", "ovs_interfaceid": "f8c8623e-abba-4da9-8ab2-20413bb09889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.736183] env[68233]: INFO nova.compute.manager [-] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Took 1.42 seconds to deallocate network for instance. 
[ 777.736624] env[68233]: DEBUG oslo_concurrency.lockutils [req-a49f4d8b-3542-4bbd-9bb1-dadbbd2635d9 req-43675277-66e7-43d4-95b0-46440963b9a9 service nova] Releasing lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.036165] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782220, 'name': Rename_Task, 'duration_secs': 0.17164} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.039761] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 778.040049] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.040264] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bbc9049-84fc-40d2-8355-74726f924bfc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.051395] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 778.051395] env[68233]: value = "task-2782222" [ 778.051395] env[68233]: _type = "Task" [ 778.051395] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.063381] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782222, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.151576] env[68233]: DEBUG oslo_concurrency.lockutils [req-5548f392-1363-4e72-8b30-ca43e21485b1 req-eefd8ca0-de2e-4b6b-bb29-dc04fdb9b007 service nova] Releasing lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.175504] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 778.223067] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2fdbd0-11e7-433a-97bb-04d3fb766582 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.233403] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e7bc36-9f3b-42e0-8f2c-ad4f4e1c60bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.273293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.278058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1992a6a-3b61-49ff-acf0-498ff610d29f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.287312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34caec99-0892-4ff2-818c-1cbd31eb8c3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.303929] env[68233]: DEBUG nova.compute.provider_tree [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.402126] env[68233]: DEBUG nova.network.neutron [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
778.428483] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 778.428721] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing instance network info cache due to event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 778.428967] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.429145] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.429308] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.539334] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.911704} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.539334] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 990e1a66-f2ab-4925-b1da-58cdc41a6315/990e1a66-f2ab-4925-b1da-58cdc41a6315.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.539334] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.539334] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c4f3e67c-4645-45f3-bff7-a6fa6f344aaa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.547868] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 778.547868] env[68233]: value = "task-2782224" [ 778.547868] env[68233]: _type = "Task" [ 778.547868] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.575985] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.580960] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782222, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.811663] env[68233]: DEBUG nova.scheduler.client.report [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 778.907017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.907017] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Instance network_info: |[{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 778.907017] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:5e:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f6820154-58d1-40c2-b9d9-eefe21708836', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 778.917194] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 778.917194] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 778.917194] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-781aabc1-edb9-4228-804d-14da5dc0171e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.940076] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 778.940076] env[68233]: value = "task-2782225" [ 778.940076] env[68233]: _type = "Task" [ 778.940076] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.948994] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782225, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.007017] env[68233]: DEBUG nova.compute.manager [req-aa19b958-4121-48a6-aae0-b32c01142675 req-bb84a313-c071-4733-be10-20908cec3fac service nova] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Received event network-vif-deleted-5be37989-5969-49ee-9609-29f58ff75d61 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.066452] env[68233]: DEBUG oslo_vmware.api [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782222, 'name': PowerOnVM_Task, 'duration_secs': 0.567303} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.072678] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 779.072932] env[68233]: INFO nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Took 8.18 seconds to spawn the instance on the hypervisor. [ 779.073298] env[68233]: DEBUG nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.074046] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074165} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.074873] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6353d65-5dc9-40a6-a3e5-1ce8d350c4a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.077727] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.078562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a81b603-8a71-4280-8d39-b41d0be3c57a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.105584] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 990e1a66-f2ab-4925-b1da-58cdc41a6315/990e1a66-f2ab-4925-b1da-58cdc41a6315.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.108911] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77562120-abba-4234-806c-d77943db4b1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.140556] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 779.140556] env[68233]: value = "task-2782226" [ 779.140556] env[68233]: _type = "Task" [ 779.140556] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.150888] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.319230] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.319348] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 779.323390] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.303s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.323621] env[68233]: DEBUG nova.objects.instance [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lazy-loading 'resources' on Instance uuid 86528c8b-b51e-480d-a7bf-013d990d51ca {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 779.388890] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updated VIF entry in instance network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.389307] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.452957] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782225, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.645809] env[68233]: INFO nova.compute.manager [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Took 41.70 seconds to build instance. 
[ 779.654302] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782226, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.828968] env[68233]: DEBUG nova.compute.utils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 779.833762] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 779.833975] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 779.892057] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.892351] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received event network-vif-plugged-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.892615] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Acquiring lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.892837] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.893013] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.893196] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 
135c2d22-26ac-41a4-a860-accc12dd4c9a] No waiting events found dispatching network-vif-plugged-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 779.893412] env[68233]: WARNING nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received unexpected event network-vif-plugged-f6820154-58d1-40c2-b9d9-eefe21708836 for instance with vm_state building and task_state spawning. [ 779.893524] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 779.893680] env[68233]: DEBUG nova.compute.manager [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing instance network info cache due to event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 779.893877] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Acquiring lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.894056] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Acquired lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.894227] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.896774] env[68233]: DEBUG nova.policy [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3d1fb4f1c8be43c287e3410a8921836d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4520e2c17033482598edf2bbde01450c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 779.952214] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782225, 'name': CreateVM_Task, 'duration_secs': 0.69821} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.952415] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 779.953045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.953205] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.953508] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 779.953756] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac2eb4b1-fea5-4740-b824-9cdeb51b2987 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.967357] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 779.967357] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520e2f7b-ce1e-6fc9-7fad-12d1ed14456e" [ 779.967357] env[68233]: _type = "Task" [ 779.967357] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.976988] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520e2f7b-ce1e-6fc9-7fad-12d1ed14456e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.149564] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56db3ca0-ca9c-429f-b0f5-886a26d5070d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.593s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.156530] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782226, 'name': ReconfigVM_Task, 'duration_secs': 0.710787} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.156989] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 990e1a66-f2ab-4925-b1da-58cdc41a6315/990e1a66-f2ab-4925-b1da-58cdc41a6315.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.157630] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0549303-6e1f-4958-95e8-b3c52ad22752 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.166421] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 780.166421] env[68233]: value = "task-2782227" [ 780.166421] env[68233]: _type = "Task" [ 780.166421] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.178181] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782227, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.334866] env[68233]: DEBUG nova.compute.utils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 780.435825] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a94483-b633-4c31-beb5-59d1602ee015 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.447487] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5179557f-7404-43dd-b776-c1040da5fc22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.487090] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc9580d-d1e3-451b-b3b4-94c402ed3fba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.492359] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Successfully created port: 087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.502812] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520e2f7b-ce1e-6fc9-7fad-12d1ed14456e, 'name': SearchDatastore_Task, 'duration_secs': 0.028611} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.504058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bdf7c3-e99c-441c-851b-b8aeb7cafd95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.509082] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 780.509082] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 780.509261] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.509261] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.509427] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 780.511843] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12b21059-7bd9-4580-98d9-8fb4c88494e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.527021] env[68233]: DEBUG nova.compute.provider_tree [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.528188] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 780.528188] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb 
tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 780.528860] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-394d6505-ef27-4de8-abc8-04dee76a7740 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.537702] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 780.537702] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c26e2b-074c-2823-8037-e1ef1adda225" [ 780.537702] env[68233]: _type = "Task" [ 780.537702] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.547506] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c26e2b-074c-2823-8037-e1ef1adda225, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.588252] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 780.588524] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559363', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'name': 'volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9eeb90c6-6ac2-43cb-887a-b69a28dc43a6', 'attached_at': '', 'detached_at': '', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'serial': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 780.589488] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-120b4673-3bed-4c15-bc20-997f9e34a62a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.609357] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0baaa861-ccfe-48cb-8c67-5e0f6b2bbaac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.638077] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 
9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768/volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 780.638396] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7314e1da-9a8e-4522-a85e-35701cfe008d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.653815] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 780.658608] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Waiting for the task: (returnval){ [ 780.658608] env[68233]: value = "task-2782228" [ 780.658608] env[68233]: _type = "Task" [ 780.658608] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.667667] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782228, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.676255] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782227, 'name': Rename_Task, 'duration_secs': 0.410003} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.676524] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.676994] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96bb8cf5-82f4-4368-9499-83fb852145d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.685554] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 780.685554] env[68233]: value = "task-2782229" [ 780.685554] env[68233]: _type = "Task" [ 780.685554] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.700344] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782229, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.756153] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updated VIF entry in instance network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.756558] env[68233]: DEBUG nova.network.neutron [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.841668] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 780.867710] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.867894] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.030513] env[68233]: DEBUG nova.scheduler.client.report [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 781.051250] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c26e2b-074c-2823-8037-e1ef1adda225, 'name': SearchDatastore_Task, 'duration_secs': 0.034874} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.051250] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd52a899-329d-4b21-87f6-5e4b9588687f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.058692] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 781.058692] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca371d-e704-85bb-73de-099c25837968" [ 781.058692] env[68233]: _type = "Task" [ 781.058692] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.070274] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca371d-e704-85bb-73de-099c25837968, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.175420] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782228, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.187859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.202080] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782229, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.262565] env[68233]: DEBUG oslo_concurrency.lockutils [req-c10d08b5-73cf-4912-981b-f85bc5c3441c req-4e56d21d-b312-4fdf-9b81-283333e684eb service nova] Releasing lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.388698] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.388698] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.389268] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.389268] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.389268] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.389675] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.389675] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 781.389675] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.539392] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.215s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.541716] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.371s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.543399] env[68233]: INFO nova.compute.claims [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.570591] env[68233]: INFO nova.scheduler.client.report [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleted allocations for instance 86528c8b-b51e-480d-a7bf-013d990d51ca [ 781.579382] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca371d-e704-85bb-73de-099c25837968, 'name': SearchDatastore_Task, 'duration_secs': 0.014318} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.579918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.580229] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 135c2d22-26ac-41a4-a860-accc12dd4c9a/135c2d22-26ac-41a4-a860-accc12dd4c9a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 781.580506] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc042698-c7fa-496d-8484-c65b9ac4d746 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.590332] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 781.590332] env[68233]: value = "task-2782230" [ 781.590332] env[68233]: _type = "Task" [ 781.590332] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.601572] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782230, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.673412] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782228, 'name': ReconfigVM_Task, 'duration_secs': 0.534431} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.673752] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfigured VM instance instance-00000008 to attach disk [datastore2] volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768/volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 781.683951] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5aac3c4-d4b2-4472-94b1-38e10a47cc60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.703902] env[68233]: DEBUG oslo_vmware.api [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782229, 'name': PowerOnVM_Task, 'duration_secs': 0.662624} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.705417] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.705726] env[68233]: INFO nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Took 8.00 seconds to spawn the instance on the hypervisor. [ 781.705814] env[68233]: DEBUG nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.706161] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Waiting for the task: (returnval){ [ 781.706161] env[68233]: value = "task-2782231" [ 781.706161] env[68233]: _type = "Task" [ 781.706161] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.706867] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3c4023-11ad-4654-957f-9bca85350c48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.724316] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782231, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.854097] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:49:31Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='2051801385',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1505023886',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 
tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 781.897025] env[68233]: DEBUG nova.virt.hardware [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 781.897025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.897025] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca60ed38-8e12-4025-ba66-422b8cb784b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.914527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f958fc5-d961-4624-b292-b92ed370a10c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.084305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8fd14e6d-d128-4a75-963f-60005ece9153 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.581s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.085260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 23.684s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
782.085704] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.089028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.089028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.089360] env[68233]: INFO nova.compute.manager [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Terminating instance [ 782.107036] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782230, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.226021] env[68233]: DEBUG oslo_vmware.api [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782231, 'name': ReconfigVM_Task, 'duration_secs': 0.178011} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.226021] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559363', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'name': 'volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9eeb90c6-6ac2-43cb-887a-b69a28dc43a6', 'attached_at': '', 'detached_at': '', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'serial': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 782.238628] env[68233]: INFO nova.compute.manager [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Took 40.58 seconds to build instance. [ 782.435805] env[68233]: DEBUG nova.compute.manager [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 782.605761] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.605970] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquired lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.606160] env[68233]: DEBUG nova.network.neutron [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 782.611912] env[68233]: DEBUG nova.compute.manager [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Received event network-vif-plugged-087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 782.611912] env[68233]: DEBUG oslo_concurrency.lockutils [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] Acquiring lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
782.611912] env[68233]: DEBUG oslo_concurrency.lockutils [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.611912] env[68233]: DEBUG oslo_concurrency.lockutils [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.612106] env[68233]: DEBUG nova.compute.manager [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] No waiting events found dispatching network-vif-plugged-087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 782.612142] env[68233]: WARNING nova.compute.manager [req-c9225d01-e8e6-4d29-af72-90ea24d19365 req-03a3f368-bed9-46e7-9bbf-f719eacf3ff6 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Received unexpected event network-vif-plugged-087c1c8a-5f17-45b3-bcce-2013fb3783d5 for instance with vm_state building and task_state spawning. [ 782.616789] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782230, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539049} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 782.616789] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 135c2d22-26ac-41a4-a860-accc12dd4c9a/135c2d22-26ac-41a4-a860-accc12dd4c9a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 782.616789] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 782.616967] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ebb0356-0989-44a7-a36c-e9f7213a4d89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.628601] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 782.628601] env[68233]: value = "task-2782232" [ 782.628601] env[68233]: _type = "Task" [ 782.628601] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 782.644566] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782232, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.718894] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Successfully updated port: 087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 782.741730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1e9e1a0a-5970-4224-a6e1-284672a89589 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.273s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.878014] env[68233]: DEBUG nova.compute.manager [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Received event network-changed-67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 782.878278] env[68233]: DEBUG nova.compute.manager [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Refreshing instance network info cache due to event network-changed-67e903f4-4173-44e7-a2c8-1d949ad0bd0d. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 782.878464] env[68233]: DEBUG oslo_concurrency.lockutils [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] Acquiring lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.878607] env[68233]: DEBUG oslo_concurrency.lockutils [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] Acquired lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.878838] env[68233]: DEBUG nova.network.neutron [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Refreshing network info cache for port 67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.957718] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.113678] env[68233]: DEBUG nova.compute.utils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Can not refresh info_cache because instance was not found {{(pid=68233) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 783.142693] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782232, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.19348} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.143072] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 783.144030] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cfc4ab2-c1f0-4d9b-8613-8943ee0ce31e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.170271] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] 135c2d22-26ac-41a4-a860-accc12dd4c9a/135c2d22-26ac-41a4-a860-accc12dd4c9a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 783.174157] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-872a4d96-dce3-45b7-9b34-6db71cd337b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.206820] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 783.206820] env[68233]: value = "task-2782233" [ 783.206820] env[68233]: _type = "Task" [ 783.206820] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.224223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.224380] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.224538] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 783.225797] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782233, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.244335] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.285862] env[68233]: DEBUG nova.objects.instance [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lazy-loading 'flavor' on Instance uuid 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 783.337193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86fc545-ba5c-423d-b014-145d1b7ab5d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.345787] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d6c0c9-a5e5-4949-9905-4693abe7b65b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.398342] env[68233]: DEBUG nova.network.neutron [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.401762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917f7658-79af-4569-925e-2dbd879bc3c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.414093] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4df4a5-126e-4d5f-9fa5-dfb2eb3dceb5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.440266] env[68233]: DEBUG nova.compute.provider_tree [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.576238] env[68233]: DEBUG nova.network.neutron [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.639205] env[68233]: DEBUG nova.network.neutron [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updated VIF entry in instance network info cache for port 67e903f4-4173-44e7-a2c8-1d949ad0bd0d. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 783.639655] env[68233]: DEBUG nova.network.neutron [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updating instance_info_cache with network_info: [{"id": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "address": "fa:16:3e:d5:df:55", "network": {"id": "cd33f971-61f7-45a7-a4be-c2314a8db06c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-2045724500-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca8b913b2dc64be09fd2419ef97f0694", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e350f83a-f581-4e10-ac16-0b0f7bfd3d38", "external-id": "nsx-vlan-transportzone-834", "segmentation_id": 834, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67e903f4-41", "ovs_interfaceid": "67e903f4-4173-44e7-a2c8-1d949ad0bd0d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.719881] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782233, 'name': ReconfigVM_Task, 'duration_secs': 0.472399} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.720043] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Reconfigured VM instance instance-00000032 to attach disk [datastore2] 135c2d22-26ac-41a4-a860-accc12dd4c9a/135c2d22-26ac-41a4-a860-accc12dd4c9a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 783.721116] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bd9319c6-17ac-4e73-8dbd-a63a03873f50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.733800] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 783.733800] env[68233]: value = "task-2782234" [ 783.733800] env[68233]: _type = "Task" [ 783.733800] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.744250] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782234, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.761183] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 783.770696] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.791027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92d286c5-a63b-4f5a-933e-4cff71d2089c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.868s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.940859] env[68233]: DEBUG nova.network.neutron [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updating instance_info_cache with network_info: [{"id": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "address": "fa:16:3e:2b:50:b4", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087c1c8a-5f", "ovs_interfaceid": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.948623] env[68233]: DEBUG nova.scheduler.client.report [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Inventory has not 
changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 784.079401] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Releasing lock "refresh_cache-86528c8b-b51e-480d-a7bf-013d990d51ca" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.079936] env[68233]: DEBUG nova.compute.manager [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.080176] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.080517] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e44632c5-e94d-483b-83fd-1b07c7919b58 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.091698] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7566b3-e552-4c1e-9ec5-25dacda280d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.129544] env[68233]: WARNING nova.virt.vmwareapi.vmops [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 86528c8b-b51e-480d-a7bf-013d990d51ca could not be found. [ 784.129793] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 784.129982] env[68233]: INFO nova.compute.manager [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 0.05 seconds to destroy the instance on the hypervisor. 
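The lock records that run through this section ("Acquiring lock ... by ...", "acquired ... :: waited N s", '"released" ... :: held N s') are emitted by oslo.concurrency's lock wrapper around a critical section. The sketch below is illustrative only: the decorator and context manager are the standard oslo.concurrency APIs, the lock names are taken from the log above, and claim_resources()/refresh_cache() are hypothetical stand-ins rather than Nova code.

# Minimal sketch (assuming oslo.concurrency is installed) of how the
# "Acquiring lock ... / acquired ... waited N s / released ... held N s"
# records above are produced. claim_resources() and refresh_cache() are
# hypothetical stand-ins, not Nova functions.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources():
    # The lockutils wrapper logs "Acquiring lock" before blocking here, then
    # the acquired/released lines with the waited/held durations seen above.
    pass


def refresh_cache(instance_uuid):
    # Context-manager form, matching the per-instance
    # "refresh_cache-<uuid>" lock names above.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass


if __name__ == "__main__":
    claim_resources()
    refresh_cache("86528c8b-b51e-480d-a7bf-013d990d51ca")

Read this way, the long waited values above (e.g. 26.371 s and 23.684 s) suggest contention on the "compute_resources" and per-instance locks rather than slow vCenter calls, since "waited" measures time spent blocking before the lock was granted.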
[ 784.130252] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.130560] env[68233]: DEBUG nova.compute.manager [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 784.130652] env[68233]: DEBUG nova.network.neutron [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 784.142681] env[68233]: DEBUG oslo_concurrency.lockutils [req-1117073a-a957-449f-bc05-acebda1ff02c req-848ec37e-44c2-4c07-baae-e06c9fe9a325 service nova] Releasing lock "refresh_cache-990e1a66-f2ab-4925-b1da-58cdc41a6315" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.146925] env[68233]: DEBUG nova.network.neutron [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 784.244262] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782234, 'name': Rename_Task, 'duration_secs': 0.188551} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.244546] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 784.244829] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3a09ec9d-d740-4d33-852b-2fb6cb645416 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.252093] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 784.252093] env[68233]: value = "task-2782235" [ 784.252093] env[68233]: _type = "Task" [ 784.252093] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.261189] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782235, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.445779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.446415] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Instance network_info: |[{"id": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "address": "fa:16:3e:2b:50:b4", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087c1c8a-5f", "ovs_interfaceid": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 784.447107] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:50:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ed8a78a1-87dc-488e-a092-afd1c2a2ddde', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '087c1c8a-5f17-45b3-bcce-2013fb3783d5', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 784.460688] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 784.461811] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.920s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.462534] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.466394] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 784.466897] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.438s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.473441] env[68233]: INFO nova.compute.claims [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.477353] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f3d2f1f-6277-4265-aed3-ad661e3f5243 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.511425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.511843] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.517185] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 784.517185] env[68233]: value = "task-2782236" [ 784.517185] env[68233]: _type = "Task" [ 784.517185] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.531559] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782236, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.652787] env[68233]: DEBUG nova.network.neutron [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.702753] env[68233]: DEBUG nova.compute.manager [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Received event network-changed-087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 784.702964] env[68233]: DEBUG nova.compute.manager [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Refreshing instance network info cache due to event network-changed-087c1c8a-5f17-45b3-bcce-2013fb3783d5. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 784.703197] env[68233]: DEBUG oslo_concurrency.lockutils [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] Acquiring lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.703340] env[68233]: DEBUG oslo_concurrency.lockutils [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] Acquired lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.703781] env[68233]: DEBUG nova.network.neutron [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Refreshing network info cache for port 087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.764909] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782235, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.768555] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.768892] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.769189] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.769408] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.769593] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.772133] env[68233]: INFO nova.compute.manager [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Terminating instance [ 785.007013] env[68233]: DEBUG nova.compute.utils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 785.011406] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 785.030406] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782236, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.048664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.048819] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.157551] env[68233]: INFO nova.compute.manager [-] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Took 1.03 seconds to deallocate network for instance. [ 785.262696] env[68233]: DEBUG oslo_vmware.api [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782235, 'name': PowerOnVM_Task, 'duration_secs': 0.560311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.263087] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 785.263363] env[68233]: INFO nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Took 8.82 seconds to spawn the instance on the hypervisor. [ 785.263560] env[68233]: DEBUG nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 785.264364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72ea3f6-8c69-4be3-a6e2-5fb95e54c4f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.276053] env[68233]: DEBUG nova.compute.manager [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 785.276275] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.277275] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597ef7d7-5196-4fcc-91c3-769845a4480c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.285540] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.285815] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cd9ecd4-0d11-4a7e-87e6-7828c7d81e28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.294572] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 785.294572] env[68233]: value = "task-2782237" [ 785.294572] env[68233]: _type = "Task" [ 785.294572] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.307047] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.419105] env[68233]: DEBUG nova.network.neutron [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updated VIF entry in instance network info cache for port 087c1c8a-5f17-45b3-bcce-2013fb3783d5. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 785.419492] env[68233]: DEBUG nova.network.neutron [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updating instance_info_cache with network_info: [{"id": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "address": "fa:16:3e:2b:50:b4", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087c1c8a-5f", "ovs_interfaceid": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.512372] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.532435] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782236, 'name': CreateVM_Task, 'duration_secs': 0.554445} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.532661] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 785.533666] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.533900] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.534589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 785.534739] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-773bf099-b481-4ad0-b277-6445af76ef7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.540774] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 785.540774] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5262a1b2-e109-2518-6ab9-a653a0f3fe71" [ 785.540774] env[68233]: _type = "Task" [ 785.540774] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.553912] env[68233]: INFO nova.compute.manager [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Detaching volume f22d7265-641d-4b7f-a599-c9b2ea2a5768 [ 785.556054] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5262a1b2-e109-2518-6ab9-a653a0f3fe71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.592902] env[68233]: INFO nova.virt.block_device [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Attempting to driver detach volume f22d7265-641d-4b7f-a599-c9b2ea2a5768 from mountpoint /dev/sdb [ 785.593222] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 785.593470] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559363', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'name': 'volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9eeb90c6-6ac2-43cb-887a-b69a28dc43a6', 'attached_at': '', 'detached_at': '', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'serial': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 785.594985] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda0d03d-41cb-444a-a72d-be38a1be4d9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.633221] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b55dfd5e-49cc-49de-a851-964cf52f804f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.644814] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c451ac40-08f2-4f6a-8f86-d7b77a16ba67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.678634] env[68233]: INFO nova.compute.manager [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance disappeared during terminate [ 785.678963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8734e98a-bc95-49c2-a19c-ab851f8b41a6 tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "86528c8b-b51e-480d-a7bf-013d990d51ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.594s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.684967] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dde3f03-7603-4a74-967e-f1d58b3a073e {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.708877] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] The volume has not been displaced from its original location: [datastore2] volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768/volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 785.713743] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfiguring VM instance instance-00000008 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 785.717313] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aff52caa-110a-4164-b719-41027cc2eca3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.740043] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Waiting for the task: (returnval){ [ 785.740043] env[68233]: value = "task-2782238" [ 785.740043] env[68233]: _type = "Task" [ 785.740043] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.755065] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782238, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.784176] env[68233]: INFO nova.compute.manager [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Took 40.39 seconds to build instance. [ 785.809418] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782237, 'name': PowerOffVM_Task, 'duration_secs': 0.454698} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.809682] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.809850] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 785.810137] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f24ebb30-29f8-4e04-baa4-5d0782ed2d87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.923205] env[68233]: DEBUG oslo_concurrency.lockutils [req-492cbb3c-c3c8-4e0e-9699-705ad717e512 req-e0ba0a81-5bc0-4a72-9adb-6d07333f35f7 service nova] Releasing lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.025521] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 786.025897] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 786.026048] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleting the datastore file [datastore2] ba4ad2f8-fad1-45be-b2b1-68c3a58f3750 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.026547] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3dc9442-218c-42d9-bc96-a825f0213dc6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.036544] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for the task: (returnval){ [ 786.036544] env[68233]: value = "task-2782240" [ 786.036544] env[68233]: _type = "Task" [ 786.036544] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.050475] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.058467] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5262a1b2-e109-2518-6ab9-a653a0f3fe71, 'name': SearchDatastore_Task, 'duration_secs': 0.016412} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.058848] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.059183] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 786.059529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.059764] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.060090] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 786.060371] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-155cb751-df43-425a-8a40-9179d314acdc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.075731] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 
tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 786.076060] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 786.077386] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5d67447-9311-4716-8633-0cd66d4c6696 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.089404] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 786.089404] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52105d4b-39dc-8243-efd7-c2ad3b4d7c8d" [ 786.089404] env[68233]: _type = "Task" [ 786.089404] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.105055] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52105d4b-39dc-8243-efd7-c2ad3b4d7c8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.166900] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821a9e78-249e-402d-90f6-3d109ab3b913 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.176437] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b617e1-fc27-4b62-bab0-be54e707fc9f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.211770] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72175838-4381-45f3-b8db-be0f739c1744 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.220853] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e578a1d-45f7-403f-aa41-d349b4260446 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.237774] env[68233]: DEBUG nova.compute.provider_tree [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.251548] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': 
task-2782238, 'name': ReconfigVM_Task, 'duration_secs': 0.320879} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.251548] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Reconfigured VM instance instance-00000008 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 786.256492] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eed17524-0f2a-43f2-ba4c-c8cbaf0bfb9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.276063] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Waiting for the task: (returnval){ [ 786.276063] env[68233]: value = "task-2782241" [ 786.276063] env[68233]: _type = "Task" [ 786.276063] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.285171] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782241, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.285602] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7a72bcc8-1848-4696-a934-b02d2a5a18eb tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.595s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.527930] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.547139] env[68233]: DEBUG oslo_vmware.api [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Task: {'id': task-2782240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214513} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.549430] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.549835] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 786.549935] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.550632] env[68233]: INFO nova.compute.manager [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Took 1.27 seconds to destroy the instance on the hypervisor. [ 786.551904] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.551904] env[68233]: DEBUG nova.compute.manager [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 786.551904] env[68233]: DEBUG nova.network.neutron [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.558734] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 786.558996] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.559172] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 786.559357] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.559522] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 786.559697] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 786.559898] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 786.560081] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 786.560290] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 786.560483] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 786.560721] env[68233]: DEBUG nova.virt.hardware [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 786.561725] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa5e952-6d57-4446-99a4-c967e2f9e836 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.572432] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f56962-a867-4617-84c5-35816793f7d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.590398] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.596026] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Creating folder: Project (75e0cd5cd34f4fe88c8c4cc5fd9abea0). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.596535] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08cc35a0-0514-4575-8d8d-3143a04e1e66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.609776] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52105d4b-39dc-8243-efd7-c2ad3b4d7c8d, 'name': SearchDatastore_Task, 'duration_secs': 0.01113} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.609776] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef344925-8599-44f9-9df2-40b0bf6d86a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.614026] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Created folder: Project (75e0cd5cd34f4fe88c8c4cc5fd9abea0) in parent group-v559223. [ 786.614255] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Creating folder: Instances. Parent ref: group-v559366. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 786.614936] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d1d9832-f982-435e-bba2-e4586ec40e4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.618255] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 786.618255] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d76c22-7fc1-7655-6d66-73f7cedf5bfe" [ 786.618255] env[68233]: _type = "Task" [ 786.618255] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.631366] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d76c22-7fc1-7655-6d66-73f7cedf5bfe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.633742] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Created folder: Instances in parent group-v559366. [ 786.634000] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.634207] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.634412] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a23e0b1-373b-4982-ba6d-225505ccb449 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.652870] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.652870] env[68233]: value = "task-2782244" [ 786.652870] env[68233]: _type = "Task" [ 786.652870] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.662688] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782244, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.740900] env[68233]: DEBUG nova.scheduler.client.report [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.788331] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 786.795022] env[68233]: DEBUG oslo_vmware.api [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Task: {'id': task-2782241, 'name': ReconfigVM_Task, 'duration_secs': 0.17529} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.795022] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559363', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'name': 'volume-f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9eeb90c6-6ac2-43cb-887a-b69a28dc43a6', 'attached_at': '', 'detached_at': '', 'volume_id': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768', 'serial': 'f22d7265-641d-4b7f-a599-c9b2ea2a5768'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 787.011751] env[68233]: DEBUG nova.compute.manager [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 787.011953] env[68233]: DEBUG nova.compute.manager [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing instance network info cache due to event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 787.012251] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] Acquiring lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.012346] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] Acquired lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.012491] env[68233]: DEBUG nova.network.neutron [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.093189] env[68233]: DEBUG nova.compute.manager [req-4e793402-38bc-40dd-b69e-9772980d0f0e req-77865594-4d00-4d12-a701-eead4e1025bd service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Received event network-vif-deleted-2dd0d9e1-b8b6-464b-a497-b32b7ff64400 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 787.093484] env[68233]: INFO nova.compute.manager [req-4e793402-38bc-40dd-b69e-9772980d0f0e req-77865594-4d00-4d12-a701-eead4e1025bd service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Neutron deleted interface 2dd0d9e1-b8b6-464b-a497-b32b7ff64400; detaching it from the instance and deleting it from the info cache [ 787.093624] env[68233]: DEBUG nova.network.neutron [req-4e793402-38bc-40dd-b69e-9772980d0f0e 
req-77865594-4d00-4d12-a701-eead4e1025bd service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.131161] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d76c22-7fc1-7655-6d66-73f7cedf5bfe, 'name': SearchDatastore_Task, 'duration_secs': 0.012532} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.131161] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.131405] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/48270554-abe4-4f72-b8b9-5f2de6a9ed26.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.131681] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc5608fe-1a5b-46e2-85be-516c1a0a15cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.141094] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 787.141094] env[68233]: value = "task-2782245" [ 787.141094] env[68233]: _type = "Task" [ 787.141094] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.151548] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782245, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.164835] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782244, 'name': CreateVM_Task, 'duration_secs': 0.502874} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.164978] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.165459] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.165659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.166026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 787.166318] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38bc4df0-5b3d-49c3-aa7c-86907e548d2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.173063] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 787.173063] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52034e12-bc98-3194-d7a2-fb3e67a6efc2" [ 787.173063] env[68233]: _type = "Task" [ 787.173063] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.181046] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52034e12-bc98-3194-d7a2-fb3e67a6efc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.246019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.246633] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 787.249442] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.485s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.251304] env[68233]: INFO nova.compute.claims [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.316061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.345642] env[68233]: DEBUG nova.network.neutron [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.350211] env[68233]: DEBUG nova.objects.instance [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lazy-loading 'flavor' on Instance uuid 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 787.596579] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f5c3048-d3ff-40ed-9221-63f5f1a5f158 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.607084] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5103b885-1b98-4915-917d-8428a36ccedc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.650353] env[68233]: DEBUG nova.compute.manager [req-4e793402-38bc-40dd-b69e-9772980d0f0e req-77865594-4d00-4d12-a701-eead4e1025bd service nova] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Detach interface failed, port_id=2dd0d9e1-b8b6-464b-a497-b32b7ff64400, reason: Instance ba4ad2f8-fad1-45be-b2b1-68c3a58f3750 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 787.662360] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782245, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.684667] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52034e12-bc98-3194-d7a2-fb3e67a6efc2, 'name': SearchDatastore_Task, 'duration_secs': 0.025658} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.686153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.686153] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.686153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.686153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.686153] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.686153] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a96fdb0-3aaa-4f77-bf68-4c116adc40f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.702361] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.702556] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.703498] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 787.703729] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.704016] env[68233]: INFO nova.compute.manager [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Rebooting instance [ 787.705395] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8078eb4f-adf7-486d-9469-b9617a7015f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.712570] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 787.712570] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e532dc-66c8-7565-785d-18fdc5708627" [ 787.712570] env[68233]: _type = "Task" [ 787.712570] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.722875] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e532dc-66c8-7565-785d-18fdc5708627, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.756797] env[68233]: DEBUG nova.compute.utils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 787.760040] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 787.760191] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 787.767019] env[68233]: DEBUG nova.network.neutron [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updated VIF entry in instance network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.767019] env[68233]: DEBUG nova.network.neutron [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.807942] env[68233]: DEBUG nova.policy [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd5e8aa96a1d44fc39fc1df7a7d1bfaa3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92a270ac60fd4416900ba7b2c047d559', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 787.851192] env[68233]: INFO nova.compute.manager [-] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Took 1.30 seconds to deallocate network for instance. 
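Note on the vCenter task entries above (the repeated "Waiting for the task", "progress is N%", and "completed successfully" lines): the vmwareapi driver submits each vCenter operation (CreateVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, ...) and then polls the returned task object through oslo.vmware until it reports success or error. The snippet below is only a minimal sketch of that poll-until-done pattern, not the oslo.vmware implementation; the poll_task callable, the task-info dict shape, and the 0.5-second interval are assumptions made for the example.

    import time

    def wait_for_task(poll_task, task_id, interval=0.5):
        # poll_task(task_id) is assumed to return a dict such as
        # {'state': 'running', 'progress': 51} or
        # {'state': 'success', 'duration_secs': 0.64}.
        while True:
            info = poll_task(task_id)
            if info['state'] == 'success':
                return info  # duration_secs is what shows up in the "completed successfully" lines
            if info['state'] == 'error':
                raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
            # corresponds to the periodic "progress is N%" DEBUG entries
            print('Task %s progress is %s%%' % (task_id, info.get('progress', 0)))
            time.sleep(interval)

In the log this pattern appears as an initial "progress is 0%" poll, an intermediate poll such as "progress is 51%", and a final entry carrying duration_secs once the task has finished.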
[ 788.161076] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.637792} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.161390] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/48270554-abe4-4f72-b8b9-5f2de6a9ed26.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.161474] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.161868] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d5c3542-08bf-4cd1-8532-0b1bcabb804e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.168987] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 788.168987] env[68233]: value = "task-2782246" [ 788.168987] env[68233]: _type = "Task" [ 788.168987] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.177831] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782246, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.222703] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e532dc-66c8-7565-785d-18fdc5708627, 'name': SearchDatastore_Task, 'duration_secs': 0.043474} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.223521] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6875fc4-f737-4702-b8d8-ff06a0d4c78c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.229094] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 788.229094] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52411bba-4f17-b8a9-ee54-86cac98c5833" [ 788.229094] env[68233]: _type = "Task" [ 788.229094] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.230190] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.241073] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52411bba-4f17-b8a9-ee54-86cac98c5833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.261116] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 788.269167] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd838041-d94f-4a12-b206-e0b83660c0c0 req-cfd890cb-dbd8-4aaa-867f-7d62cd715690 service nova] Releasing lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.269685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquired lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.269947] env[68233]: DEBUG nova.network.neutron [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.306630] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Successfully created port: 4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 788.357648] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.358459] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd101327-eca4-4349-8d16-e269e70ae25c tempest-VolumesAssistedSnapshotsTest-1511466553 tempest-VolumesAssistedSnapshotsTest-1511466553-project-admin] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.310s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.680384] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782246, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083701} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.680722] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.681513] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-669947cc-def8-4e92-8f89-c3c6816f8f2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.705911] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/48270554-abe4-4f72-b8b9-5f2de6a9ed26.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 788.708412] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39d49c65-6a59-417a-9b12-378c0b68f0ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.728294] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 788.728294] env[68233]: value = "task-2782248" [ 788.728294] env[68233]: _type = "Task" [ 788.728294] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.741694] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52411bba-4f17-b8a9-ee54-86cac98c5833, 'name': SearchDatastore_Task, 'duration_secs': 0.015247} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.744880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.745180] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 788.745446] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782248, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.748057] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90b43cf5-e016-4104-8091-aedc2ff5d047 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.753915] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 788.753915] env[68233]: value = "task-2782249" [ 788.753915] env[68233]: _type = "Task" [ 788.753915] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.762053] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.857923] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a188a2df-511c-4399-bb8b-a2a78d6c0b03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.864946] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5021810-2c33-40dd-a8c2-7b9d47663be9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.898537] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd25d6fd-0f27-428e-b3dd-4b4fd0e8de8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.907188] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c50ce1b8-e49c-4643-8229-2a6ab811b00b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.920594] env[68233]: DEBUG nova.compute.provider_tree [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.997747] env[68233]: DEBUG nova.network.neutron [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.240072] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782248, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.271989] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782249, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.276349] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 789.313295] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 789.313295] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 789.313434] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 789.313610] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 789.313810] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 789.313974] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 789.314313] env[68233]: DEBUG nova.virt.hardware 
[None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 789.314426] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 789.314625] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 789.314842] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 789.315061] env[68233]: DEBUG nova.virt.hardware [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 789.316102] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837c47b2-b342-4503-ada2-6077649dca37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.326762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43195cf3-7b24-414b-ad48-26abaaba3fdc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.424371] env[68233]: DEBUG nova.scheduler.client.report [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 789.501672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Releasing lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.739374] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 
tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782248, 'name': ReconfigVM_Task, 'duration_secs': 0.67149} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.739651] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/48270554-abe4-4f72-b8b9-5f2de6a9ed26.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.739964] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68233) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 789.740624] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-89139560-8fcd-48dc-a66b-2ecea2e7e245 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.746739] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 789.746739] env[68233]: value = "task-2782250" [ 789.746739] env[68233]: _type = "Task" [ 789.746739] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.754669] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782250, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.762393] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587845} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.762656] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 789.762894] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 789.763165] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0ccdefe-013b-4427-88d4-a5d040755dc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.772083] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 789.772083] env[68233]: value = "task-2782251" [ 789.772083] env[68233]: _type = "Task" [ 789.772083] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.778723] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782251, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.840162] env[68233]: DEBUG nova.compute.manager [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Received event network-vif-plugged-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 789.840162] env[68233]: DEBUG oslo_concurrency.lockutils [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] Acquiring lock "09e4644d-d845-47f4-8748-925f739863b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.840162] env[68233]: DEBUG oslo_concurrency.lockutils [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] Lock "09e4644d-d845-47f4-8748-925f739863b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.840162] env[68233]: DEBUG oslo_concurrency.lockutils [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] Lock "09e4644d-d845-47f4-8748-925f739863b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.840162] env[68233]: DEBUG nova.compute.manager [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] No waiting events found dispatching network-vif-plugged-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 789.841416] env[68233]: WARNING nova.compute.manager [req-56e68380-f7f7-412c-8c3d-b41181c4386b req-d7930d10-468a-4e35-8a72-602cc068ef5d service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Received unexpected event network-vif-plugged-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f for instance with vm_state building and task_state spawning. [ 789.932765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.932765] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 789.935509] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.125s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.935509] env[68233]: DEBUG nova.objects.instance [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lazy-loading 'resources' on Instance uuid 3c9b701e-6461-45e3-8654-3291c5a487b9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 789.989061] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Successfully updated port: 4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.006551] env[68233]: DEBUG nova.compute.manager [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 790.007616] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41747db-0049-4c4c-a54a-e966f8b38ccf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.255653] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782250, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.047926} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.255923] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68233) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 790.256775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2203fd58-4594-4bd7-8e2d-2337a883f755 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.281439] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/ephemeral_0.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.285270] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56620d7a-6dca-4efa-9df2-db136b7bf80c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.304303] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782251, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.095286} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.305471] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.305859] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 790.305859] env[68233]: value = "task-2782252" [ 790.305859] env[68233]: _type = "Task" [ 790.305859] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.306752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f43a160-9584-4111-a848-39d1144d7a1d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.329070] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.332928] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19c54d4b-c9c6-4fa1-8cf0-1099cb39d93f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.347171] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782252, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.352333] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 790.352333] env[68233]: value = "task-2782253" [ 790.352333] env[68233]: _type = "Task" [ 790.352333] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.360516] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782253, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.438548] env[68233]: DEBUG nova.compute.utils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.442425] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Not allocating networking since 'none' was specified. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 790.494243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.494243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquired lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.494243] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 790.819790] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782252, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.862032] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782253, 'name': ReconfigVM_Task, 'duration_secs': 0.504545} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.862032] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 790.862677] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75f5fdac-0bf9-493e-8b5c-2ae874b2f1ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.868732] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 790.868732] env[68233]: value = "task-2782254" [ 790.868732] env[68233]: _type = "Task" [ 790.868732] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.879279] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782254, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.898039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.898564] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.898900] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.899249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.899583] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.901749] env[68233]: INFO nova.compute.manager [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Terminating instance [ 790.944667] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.025483] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d660095e-5aae-4122-a9b1-2f6923c9da0f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.034258] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Doing hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 791.035051] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-028f9e88-449b-4a42-bea2-a638de070101 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.041830] env[68233]: DEBUG oslo_vmware.api [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 791.041830] env[68233]: value = "task-2782255" [ 791.041830] env[68233]: _type = "Task" [ 791.041830] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.050604] env[68233]: DEBUG oslo_vmware.api [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782255, 'name': ResetVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.054591] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.081716] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0c0d1e-0313-429f-b8bb-da5637198f2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.089556] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6429781-cf9a-412e-a960-9bb8f13ef8e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.124222] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25ff774-caf0-4ec7-b75e-b4410092f195 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.133239] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcba5296-4efc-44e0-889c-5536e09bd866 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.149521] env[68233]: DEBUG nova.compute.provider_tree [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.255292] env[68233]: DEBUG nova.network.neutron [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Updating instance_info_cache with network_info: [{"id": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "address": "fa:16:3e:00:11:d8", "network": {"id": "b41c3e29-73f3-4505-b138-ccb485df064b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-200854309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92a270ac60fd4416900ba7b2c047d559", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6356d5-3b", "ovs_interfaceid": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.318799] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782252, 'name': ReconfigVM_Task, 'duration_secs': 0.883565} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.319116] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Reconfigured VM instance instance-00000033 to attach disk [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26/ephemeral_0.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.319709] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e6bbb53-3b4f-45d5-9542-638ac8387dd9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.325509] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 791.325509] env[68233]: value = "task-2782256" [ 791.325509] env[68233]: _type = "Task" [ 791.325509] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.333197] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782256, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.380030] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782254, 'name': Rename_Task, 'duration_secs': 0.120321} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.380030] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.380030] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3061777-b532-4d54-9b3e-9725ebc834d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.385517] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 791.385517] env[68233]: value = "task-2782257" [ 791.385517] env[68233]: _type = "Task" [ 791.385517] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.394487] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782257, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.408439] env[68233]: DEBUG nova.compute.manager [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 791.408439] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 791.408439] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ff36e5-ec42-43cc-a027-37631cefc28f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.414607] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.414870] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4a20aaf-bee6-409a-83ce-33f1a25e130d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.421560] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 791.421560] env[68233]: value = "task-2782258" [ 791.421560] env[68233]: _type = "Task" [ 791.421560] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.429896] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2782258, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.553410] env[68233]: DEBUG oslo_vmware.api [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782255, 'name': ResetVM_Task, 'duration_secs': 0.093824} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.553679] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Did hard reboot of VM {{(pid=68233) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 791.553877] env[68233]: DEBUG nova.compute.manager [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.554692] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c16c768-4a63-4c9d-abed-ba85b38cad60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.654083] env[68233]: DEBUG nova.scheduler.client.report [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 791.758141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Releasing lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.758512] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Instance network_info: |[{"id": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "address": "fa:16:3e:00:11:d8", "network": {"id": "b41c3e29-73f3-4505-b138-ccb485df064b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-200854309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92a270ac60fd4416900ba7b2c047d559", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6356d5-3b", "ovs_interfaceid": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 791.759172] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:11:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b250e561-3be3-4bae-be1a-162251b1ee31', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b6356d5-3b4a-4a8f-951e-f7457bcafd9f', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 791.769609] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Creating folder: Project (92a270ac60fd4416900ba7b2c047d559). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 791.770100] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8732aad1-7180-4455-a964-27ceca560416 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.780658] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Created folder: Project (92a270ac60fd4416900ba7b2c047d559) in parent group-v559223. [ 791.780881] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Creating folder: Instances. Parent ref: group-v559369. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 791.781155] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-594332dd-8563-49ca-a560-4ef653a1165b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.789948] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Created folder: Instances in parent group-v559369. [ 791.790323] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 791.790587] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 791.790901] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec58381a-be67-4f17-9791-3297757b9900 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.819760] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 791.819760] env[68233]: value = "task-2782261" [ 791.819760] env[68233]: _type = "Task" [ 791.819760] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.827828] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782261, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.834938] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782256, 'name': Rename_Task, 'duration_secs': 0.17105} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.835213] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 791.835452] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6e15fbb6-07e6-4b1c-a9b5-897ee8deebf6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.841633] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 791.841633] env[68233]: value = "task-2782262" [ 791.841633] env[68233]: _type = "Task" [ 791.841633] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.848853] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782262, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.884281] env[68233]: DEBUG nova.compute.manager [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Received event network-changed-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 791.884490] env[68233]: DEBUG nova.compute.manager [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Refreshing instance network info cache due to event network-changed-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 791.884745] env[68233]: DEBUG oslo_concurrency.lockutils [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] Acquiring lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.884938] env[68233]: DEBUG oslo_concurrency.lockutils [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] Acquired lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.885196] env[68233]: DEBUG nova.network.neutron [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Refreshing network info cache for port 4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 791.896045] env[68233]: DEBUG oslo_vmware.api [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782257, 'name': PowerOnVM_Task, 'duration_secs': 0.442808} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.896849] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.897075] env[68233]: INFO nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Took 5.37 seconds to spawn the instance on the hypervisor. 
[ 791.897268] env[68233]: DEBUG nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.898058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-979840c4-3b4f-4c72-ba2c-046eb1190c8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.930599] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2782258, 'name': PowerOffVM_Task, 'duration_secs': 0.318341} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.930945] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 791.931193] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 791.931397] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b52ccb45-9f59-4b6d-876a-9cf430b34b04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.954050] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 791.983051] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 791.983346] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.983566] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 791.983686] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.983835] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 791.983984] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 791.984222] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 791.984383] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 791.984551] env[68233]: DEBUG nova.virt.hardware [None 
req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 791.984711] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 791.984954] env[68233]: DEBUG nova.virt.hardware [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 791.985771] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ea9c3c-3575-480f-ab39-31a9464216b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.995240] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc00e5a-7cfc-40a1-918f-ba91e3d9dcb3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.001206] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 792.001206] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 792.001206] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Deleting the datastore file [datastore2] 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 792.001829] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fb709b3-3e62-4c49-9914-1342889b4a11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.012309] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.017803] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Creating folder: Project (364cd7bf12a24bca944ca0adf1d28a0c). 
Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.019601] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3abcf9f4-7fb0-4618-87a4-1186ba08413a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.021656] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for the task: (returnval){ [ 792.021656] env[68233]: value = "task-2782264" [ 792.021656] env[68233]: _type = "Task" [ 792.021656] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.028820] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Created folder: Project (364cd7bf12a24bca944ca0adf1d28a0c) in parent group-v559223. [ 792.029038] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Creating folder: Instances. Parent ref: group-v559372. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.032101] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12120eb6-177f-4b2d-a3b6-3c6f849c33e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.033736] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2782264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.039942] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Created folder: Instances in parent group-v559372. [ 792.040258] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.040443] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.040672] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c12454d-9f85-4af8-b5d5-b2585145220e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.057927] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.057927] env[68233]: value = "task-2782267" [ 792.057927] env[68233]: _type = "Task" [ 792.057927] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.066044] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782267, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.075655] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c5633f5d-24b7-4bf9-8350-670084ac2df8 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.370s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.162351] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.165430] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.241s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.166900] env[68233]: INFO nova.compute.claims [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.185769] env[68233]: INFO nova.scheduler.client.report [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Deleted allocations for instance 3c9b701e-6461-45e3-8654-3291c5a487b9 [ 792.329478] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782261, 'name': CreateVM_Task, 'duration_secs': 0.396367} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.329767] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.330946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.330946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.330946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 792.331241] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06de4bb4-4c98-4254-8522-922b98ca6fad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.335963] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 792.335963] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9cdf6-73be-cb4e-cde0-afb78b5f3388" [ 792.335963] env[68233]: _type = "Task" [ 792.335963] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.343941] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9cdf6-73be-cb4e-cde0-afb78b5f3388, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.353447] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782262, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.413823] env[68233]: INFO nova.compute.manager [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Took 37.29 seconds to build instance. 
[ 792.532083] env[68233]: DEBUG oslo_vmware.api [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Task: {'id': task-2782264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192148} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.532401] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 792.532638] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 792.532817] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 792.533065] env[68233]: INFO nova.compute.manager [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 792.533395] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.533580] env[68233]: DEBUG nova.compute.manager [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 792.533704] env[68233]: DEBUG nova.network.neutron [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 792.570430] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782267, 'name': CreateVM_Task, 'duration_secs': 0.339527} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.570670] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 792.571247] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.697336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-930f183d-66ee-489f-ae87-60b2093ee8b9 tempest-ServersTestBootFromVolume-705875082 tempest-ServersTestBootFromVolume-705875082-project-member] Lock "3c9b701e-6461-45e3-8654-3291c5a487b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.469s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.857952] env[68233]: DEBUG oslo_vmware.api [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782262, 'name': PowerOnVM_Task, 'duration_secs': 0.771374} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.864837] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 792.865370] env[68233]: INFO nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Took 11.01 seconds to spawn the instance on the hypervisor. [ 792.865672] env[68233]: DEBUG nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 792.866091] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9cdf6-73be-cb4e-cde0-afb78b5f3388, 'name': SearchDatastore_Task, 'duration_secs': 0.013928} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.867311] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dd4741-4085-4837-9980-688df213aa98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.869801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.870276] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.870645] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.870858] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.871171] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 792.872067] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.872465] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 792.872774] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a49a032-add6-4cc5-b532-c93eca71e49d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.876910] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc3fe242-59c5-4da7-83be-06a81da16ad3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.887553] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 792.891919] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 792.891919] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbc1820d-1d63-4121-8af7-1fe6d9c3440a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.893242] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 792.893242] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52081695-4506-96a7-ff9e-1a20b19cbac9" [ 792.893242] env[68233]: _type = "Task" [ 792.893242] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.901038] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 792.901038] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5288790a-7a91-5a8c-37f9-3d8e515121f1" [ 792.901038] env[68233]: _type = "Task" [ 792.901038] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.910867] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52081695-4506-96a7-ff9e-1a20b19cbac9, 'name': SearchDatastore_Task, 'duration_secs': 0.010782} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.912515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.912515] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 792.912515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.917062] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd56d78f-0433-41bf-8bcd-c950a8627056 tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.628s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.917492] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5288790a-7a91-5a8c-37f9-3d8e515121f1, 'name': SearchDatastore_Task, 'duration_secs': 0.01041} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.919426] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb806fa-68a6-4ad8-a441-7f42b3c6d406 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.928567] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 792.928567] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5264bbed-7a30-ccea-a878-6cc2471a73c7" [ 792.928567] env[68233]: _type = "Task" [ 792.928567] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.938504] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5264bbed-7a30-ccea-a878-6cc2471a73c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.943785] env[68233]: DEBUG nova.network.neutron [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Updated VIF entry in instance network info cache for port 4b6356d5-3b4a-4a8f-951e-f7457bcafd9f. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 792.944138] env[68233]: DEBUG nova.network.neutron [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Updating instance_info_cache with network_info: [{"id": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "address": "fa:16:3e:00:11:d8", "network": {"id": "b41c3e29-73f3-4505-b138-ccb485df064b", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-200854309-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "92a270ac60fd4416900ba7b2c047d559", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b250e561-3be3-4bae-be1a-162251b1ee31", "external-id": "nsx-vlan-transportzone-464", "segmentation_id": 464, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b6356d5-3b", "ovs_interfaceid": "4b6356d5-3b4a-4a8f-951e-f7457bcafd9f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.143141] env[68233]: INFO nova.compute.manager [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Rebuilding instance [ 793.201862] env[68233]: DEBUG nova.compute.manager [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 793.202957] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4df9552-e740-45a9-9a6b-9a48e3e039f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.376810] env[68233]: DEBUG nova.compute.manager [req-70c7b440-5a55-4310-8307-01b7de38fd01 req-8a2ff3b7-1883-4d0a-be2d-7de85493f15d service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Received event network-vif-deleted-cd34c1dc-df6e-4115-b9e9-55df77ee36c9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 793.376810] env[68233]: INFO nova.compute.manager [req-70c7b440-5a55-4310-8307-01b7de38fd01 req-8a2ff3b7-1883-4d0a-be2d-7de85493f15d service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Neutron deleted interface cd34c1dc-df6e-4115-b9e9-55df77ee36c9; detaching it from the instance and deleting it from the info cache [ 
793.376810] env[68233]: DEBUG nova.network.neutron [req-70c7b440-5a55-4310-8307-01b7de38fd01 req-8a2ff3b7-1883-4d0a-be2d-7de85493f15d service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.401325] env[68233]: INFO nova.compute.manager [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Took 43.60 seconds to build instance. [ 793.419860] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 793.440580] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5264bbed-7a30-ccea-a878-6cc2471a73c7, 'name': SearchDatastore_Task, 'duration_secs': 0.012819} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.440871] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.441165] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 09e4644d-d845-47f4-8748-925f739863b9/09e4644d-d845-47f4-8748-925f739863b9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 793.442077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.442077] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.442077] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58326677-c047-40e5-9a56-93f2c3f28489 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.446267] env[68233]: DEBUG oslo_vmware.service [-] 
Invoking FileManager.MakeDirectory with opID=oslo.vmware-565e0e4e-bec7-45fa-bad8-b3fd4ce4def2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.448514] env[68233]: DEBUG oslo_concurrency.lockutils [req-10e17b85-b38d-428e-95f6-74dd12c3c264 req-616c20c7-c962-4cb2-aa2c-ff149b6380d1 service nova] Releasing lock "refresh_cache-09e4644d-d845-47f4-8748-925f739863b9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.453607] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 793.453607] env[68233]: value = "task-2782268" [ 793.453607] env[68233]: _type = "Task" [ 793.453607] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.458225] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 793.458410] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 793.459693] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13ba88aa-8c28-4dea-a576-035980bfa0d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.465343] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782268, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.469359] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 793.469359] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d8ac5-a231-88d8-6b00-a40e830631d1" [ 793.469359] env[68233]: _type = "Task" [ 793.469359] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.477196] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d8ac5-a231-88d8-6b00-a40e830631d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.532326] env[68233]: DEBUG nova.network.neutron [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.813025] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01381e8d-8a7a-4b09-8ff6-2b386952d8cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.820534] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dee8a6-26d8-4374-95ee-7becc3fc3205 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.850734] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.850983] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.851203] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.851388] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.851558] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.853856] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e207fdb-51c9-40ca-b0eb-9995319a1bd8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.857087] env[68233]: INFO nova.compute.manager [None 
req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Terminating instance [ 793.863731] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe391df7-75a3-4ac8-9190-f5f0c8bec8c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.879974] env[68233]: DEBUG nova.compute.provider_tree [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.881245] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4414991-108f-4212-a2e7-3b66da3d1bd7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.890385] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f8924b-fa9a-40e8-b429-c040ddce9810 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.904232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-437faa7d-a938-4cdb-aa5b-eaa495c86a17 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.277s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.912745] env[68233]: DEBUG nova.compute.manager [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 793.912958] env[68233]: DEBUG nova.compute.manager [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing instance network info cache due to event network-changed-f6820154-58d1-40c2-b9d9-eefe21708836. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 793.913197] env[68233]: DEBUG oslo_concurrency.lockutils [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] Acquiring lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.913345] env[68233]: DEBUG oslo_concurrency.lockutils [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] Acquired lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.913509] env[68233]: DEBUG nova.network.neutron [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Refreshing network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.936723] env[68233]: DEBUG nova.compute.manager [req-70c7b440-5a55-4310-8307-01b7de38fd01 req-8a2ff3b7-1883-4d0a-be2d-7de85493f15d service nova] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Detach interface failed, port_id=cd34c1dc-df6e-4115-b9e9-55df77ee36c9, reason: Instance 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 793.947944] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.964384] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782268, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.980958] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d8ac5-a231-88d8-6b00-a40e830631d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009382} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.982095] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fc0cb46-dc96-4c0b-9ce0-abab7d62a044 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.989258] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 793.989258] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e92281-435e-81bb-6c28-d14dea4623f6" [ 793.989258] env[68233]: _type = "Task" [ 793.989258] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.002527] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e92281-435e-81bb-6c28-d14dea4623f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.035678] env[68233]: INFO nova.compute.manager [-] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Took 1.50 seconds to deallocate network for instance. [ 794.219028] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.219413] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fee8251-6993-4c26-b3ef-64deff8b4922 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.229722] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 794.229722] env[68233]: value = "task-2782269" [ 794.229722] env[68233]: _type = "Task" [ 794.229722] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.242336] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782269, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.361517] env[68233]: DEBUG nova.compute.manager [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 794.361767] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.362698] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff64a3c5-2374-494a-bae5-9fa03e4c52c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.371636] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 794.371998] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cd679d6-09ce-464c-87e2-275ea5016277 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.378550] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 794.378550] env[68233]: value = "task-2782270" [ 794.378550] env[68233]: _type = "Task" [ 794.378550] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.387156] env[68233]: DEBUG nova.scheduler.client.report [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 794.390497] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782270, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.406408] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 794.467775] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782268, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.869176} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.470562] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 09e4644d-d845-47f4-8748-925f739863b9/09e4644d-d845-47f4-8748-925f739863b9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 794.470943] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 794.471274] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3f01ff66-0efa-46f8-86df-9af700889d09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.478484] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 794.478484] env[68233]: value = "task-2782271" [ 794.478484] env[68233]: _type = "Task" [ 794.478484] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.487904] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782271, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.499537] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e92281-435e-81bb-6c28-d14dea4623f6, 'name': SearchDatastore_Task, 'duration_secs': 0.077287} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.499887] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.500315] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 65f9fe09-97dc-4988-bae4-243d60e33be9/65f9fe09-97dc-4988-bae4-243d60e33be9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 794.500670] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-974df4dc-206d-4454-aa2b-95b4d6d47736 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.506673] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 794.506673] env[68233]: value = "task-2782272" [ 794.506673] env[68233]: _type = "Task" [ 794.506673] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.517840] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782272, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.543220] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.739994] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782269, 'name': PowerOffVM_Task, 'duration_secs': 0.152271} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.739994] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.739994] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 794.740841] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-165b90a1-c0b2-4d55-8604-0f523bcd0b07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.747464] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.747769] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d764714f-7314-4372-8fa4-dc5a631c7912 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.760707] env[68233]: DEBUG nova.network.neutron [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updated VIF entry in instance network info cache for port f6820154-58d1-40c2-b9d9-eefe21708836. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.761092] env[68233]: DEBUG nova.network.neutron [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [{"id": "f6820154-58d1-40c2-b9d9-eefe21708836", "address": "fa:16:3e:19:5e:0c", "network": {"id": "56104e8a-7542-4a85-90a0-646c7ddd7d53", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-279440822-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8ffd47b7024dbd9138d2d6963e1eb4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf6820154-58", "ovs_interfaceid": "f6820154-58d1-40c2-b9d9-eefe21708836", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.775429] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 794.775651] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 794.775830] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Deleting the datastore file [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 794.776719] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-25a35095-217d-4462-8f4b-323cd51093a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.790397] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 794.790397] env[68233]: value = "task-2782274" [ 794.790397] env[68233]: _type = "Task" [ 794.790397] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.795857] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782274, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.890879] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782270, 'name': PowerOffVM_Task, 'duration_secs': 0.229802} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.892730] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 794.892730] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 794.893246] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.728s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.893738] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 794.896400] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4372e8ed-e3d0-42da-8162-57eea6871076 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.898340] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.778s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 794.898558] env[68233]: DEBUG nova.objects.instance [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lazy-loading 'resources' on Instance uuid c8fd5539-8add-45fe-a0ac-8767bf8a330e {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 794.933487] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 794.990953] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782271, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068344} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.991638] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.992679] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c03b8e-b090-4714-9768-47e3b248bf41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.031391] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 09e4644d-d845-47f4-8748-925f739863b9/09e4644d-d845-47f4-8748-925f739863b9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.037333] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13b7dea0-e687-4384-aab6-85d1f0036a4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.053828] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 795.054158] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 795.054386] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleting the datastore file [datastore2] 135c2d22-26ac-41a4-a860-accc12dd4c9a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 795.054744] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c994989-9228-4a42-9207-9f266ee9f4dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.066483] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782272, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.069697] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 795.069697] env[68233]: value = "task-2782277" [ 795.069697] env[68233]: _type = "Task" [ 795.069697] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.070140] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 795.070140] env[68233]: value = "task-2782276" [ 795.070140] env[68233]: _type = "Task" [ 795.070140] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.084559] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782277, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.088228] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782276, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.263936] env[68233]: DEBUG oslo_concurrency.lockutils [req-9c61d042-a89f-45e1-bc70-fd4db1d5093a req-dc430300-c941-4d15-8763-e0ec6258f067 service nova] Releasing lock "refresh_cache-135c2d22-26ac-41a4-a860-accc12dd4c9a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.298207] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782274, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.400711} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.298565] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.298816] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.299064] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.400478] env[68233]: DEBUG nova.compute.utils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.407254] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 795.407254] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 795.425192] env[68233]: DEBUG nova.compute.manager [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Received event network-changed-087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 795.425384] env[68233]: DEBUG nova.compute.manager [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Refreshing instance network info cache due to event network-changed-087c1c8a-5f17-45b3-bcce-2013fb3783d5. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 795.425783] env[68233]: DEBUG oslo_concurrency.lockutils [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] Acquiring lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.425783] env[68233]: DEBUG oslo_concurrency.lockutils [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] Acquired lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.426035] env[68233]: DEBUG nova.network.neutron [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Refreshing network info cache for port 087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.485441] env[68233]: DEBUG nova.policy [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc2c677532ef4ebfa5c8e7357c2d1732', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb0965969ed647038bea9f0388a2df05', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 795.524971] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782272, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649066} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.528092] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 65f9fe09-97dc-4988-bae4-243d60e33be9/65f9fe09-97dc-4988-bae4-243d60e33be9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 795.528165] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 795.528627] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b73d3992-7ca6-4f82-b816-825d5c225b84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.535897] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 795.535897] env[68233]: value = "task-2782278" [ 795.535897] env[68233]: _type = "Task" [ 795.535897] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.551647] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782278, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.587472] env[68233]: DEBUG oslo_vmware.api [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782277, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311618} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.593219] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 795.593219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 795.593219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 795.593219] env[68233]: INFO nova.compute.manager [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Took 1.23 seconds to destroy the instance on the hypervisor. [ 795.593219] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 795.593219] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782276, 'name': ReconfigVM_Task, 'duration_secs': 0.420579} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.593219] env[68233]: DEBUG nova.compute.manager [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 795.593219] env[68233]: DEBUG nova.network.neutron [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 795.594872] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 09e4644d-d845-47f4-8748-925f739863b9/09e4644d-d845-47f4-8748-925f739863b9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.596420] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c05a77a9-39e4-4cd6-b8b8-dae272c5d4c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.605705] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 795.605705] env[68233]: value = "task-2782279" [ 795.605705] env[68233]: _type = "Task" [ 795.605705] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.620546] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782279, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.907689] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 795.973852] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Successfully created port: a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.049902] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782278, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09247} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.054381] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 796.056457] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11fef15-a0cf-4667-aac3-9875686a7df5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.077338] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 65f9fe09-97dc-4988-bae4-243d60e33be9/65f9fe09-97dc-4988-bae4-243d60e33be9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 796.080933] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ac30088-2a13-4978-858a-6b66a6562a3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.099997] env[68233]: DEBUG nova.compute.manager [req-27732ac1-eb8b-4b84-8d6e-d91bb2b215fb req-04c60295-e5b7-4b03-882d-dfbdd221078f service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Received event network-vif-deleted-f6820154-58d1-40c2-b9d9-eefe21708836 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 796.100118] env[68233]: INFO nova.compute.manager [req-27732ac1-eb8b-4b84-8d6e-d91bb2b215fb req-04c60295-e5b7-4b03-882d-dfbdd221078f service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Neutron deleted interface f6820154-58d1-40c2-b9d9-eefe21708836; detaching it from the instance and deleting it from the info cache [ 796.100266] env[68233]: DEBUG nova.network.neutron [req-27732ac1-eb8b-4b84-8d6e-d91bb2b215fb req-04c60295-e5b7-4b03-882d-dfbdd221078f service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.108709] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 796.108709] env[68233]: value = "task-2782280" [ 796.108709] env[68233]: _type = "Task" [ 796.108709] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.125670] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782279, 'name': Rename_Task, 'duration_secs': 0.149487} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.129669] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 796.130362] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.130806] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e23d5b7-e209-48a9-b27a-24c5b88b004f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.134291] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a753f2e-5623-43e5-86c5-41bd4c828f3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.145810] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ea7d57-2f00-4907-a318-9bd2a1fafb1d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.149338] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 796.149338] env[68233]: value = "task-2782281" [ 796.149338] env[68233]: _type = "Task" [ 796.149338] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.183431] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58af3a65-825b-4270-a006-2aeb0c05f68a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.189295] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782281, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.194623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3021ce-171a-4e11-9277-b4e49d6e6e48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.209118] env[68233]: DEBUG nova.compute.provider_tree [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.339405] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 796.339659] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.339877] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 796.341860] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.342070] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 796.342234] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 796.342453] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 796.342615] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 796.342786] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 796.342960] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 796.343145] env[68233]: DEBUG nova.virt.hardware [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 796.344144] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96dc00fb-7537-4778-bd25-d4c223ea0764 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.356998] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d63da0-a927-4e27-9e1e-ee3e91da7504 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.370772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 796.376557] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.377290] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 796.377515] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66dadb7d-1f14-44fc-b124-5a09d710e461 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.389839] env[68233]: DEBUG nova.network.neutron [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updated VIF entry in instance network info cache for port 087c1c8a-5f17-45b3-bcce-2013fb3783d5. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.390614] env[68233]: DEBUG nova.network.neutron [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updating instance_info_cache with network_info: [{"id": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "address": "fa:16:3e:2b:50:b4", "network": {"id": "b19f1207-73f4-4941-8267-c4d3b9d5c6fe", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-2109554679-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4520e2c17033482598edf2bbde01450c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ed8a78a1-87dc-488e-a092-afd1c2a2ddde", "external-id": "nsx-vlan-transportzone-21", "segmentation_id": 21, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap087c1c8a-5f", "ovs_interfaceid": "087c1c8a-5f17-45b3-bcce-2013fb3783d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.396573] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 796.396573] env[68233]: value = "task-2782282" [ 796.396573] env[68233]: _type = "Task" [ 796.396573] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.404605] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782282, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.471384] env[68233]: DEBUG nova.network.neutron [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.604383] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fa698a2-7dbf-4c78-bdc0-e9eb91a6a9c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.614261] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208ca8ff-ff23-43b2-96ed-8969a307e93b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.638155] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782280, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.659911] env[68233]: DEBUG nova.compute.manager [req-27732ac1-eb8b-4b84-8d6e-d91bb2b215fb req-04c60295-e5b7-4b03-882d-dfbdd221078f service nova] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Detach interface failed, port_id=f6820154-58d1-40c2-b9d9-eefe21708836, reason: Instance 135c2d22-26ac-41a4-a860-accc12dd4c9a could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 796.671164] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782281, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.713027] env[68233]: DEBUG nova.scheduler.client.report [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 796.894876] env[68233]: DEBUG oslo_concurrency.lockutils [req-728e4318-5380-465b-9cab-6b635df611de req-1f548e93-f707-44dd-a42b-38fd87b14a46 service nova] Releasing lock "refresh_cache-48270554-abe4-4f72-b8b9-5f2de6a9ed26" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.907896] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782282, 'name': CreateVM_Task, 'duration_secs': 0.370917} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.908108] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 796.908547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.908706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.909044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 796.909317] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42a9ef1e-3229-40d6-b5cc-967a09454699 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.913946] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 796.913946] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ec4952-394a-7968-41fd-d6997d059434" [ 796.913946] env[68233]: _type = "Task" [ 796.913946] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.917902] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 796.927876] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ec4952-394a-7968-41fd-d6997d059434, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.946475] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 796.946604] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 796.946746] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 796.946929] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 796.947112] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 796.947274] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 796.947514] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 796.947708] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 796.947950] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 796.948131] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 796.948247] env[68233]: DEBUG nova.virt.hardware [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 796.949213] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac0220b-6e32-4cc2-9d95-68259a02d44b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.956875] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65587f31-78a1-48e2-8ab9-952bf784909e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.974256] env[68233]: INFO nova.compute.manager [-] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Took 1.38 seconds to deallocate network for instance. [ 797.135877] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782280, 'name': ReconfigVM_Task, 'duration_secs': 0.836318} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.136195] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 65f9fe09-97dc-4988-bae4-243d60e33be9/65f9fe09-97dc-4988-bae4-243d60e33be9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 797.136882] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-72c2de5e-af6e-490b-940d-26a41fb5b7cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.143227] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 797.143227] env[68233]: value = "task-2782283" [ 797.143227] env[68233]: _type = "Task" [ 797.143227] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.154032] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782283, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.168777] env[68233]: DEBUG oslo_vmware.api [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782281, 'name': PowerOnVM_Task, 'duration_secs': 0.574787} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.169134] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 797.169414] env[68233]: INFO nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Took 7.89 seconds to spawn the instance on the hypervisor. [ 797.169642] env[68233]: DEBUG nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.170558] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6847dd5c-1101-489a-bd51-e1a1fcd3ecaa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.226627] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.325s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.227327] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.024s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.227709] env[68233]: DEBUG nova.objects.instance [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lazy-loading 'resources' on Instance uuid 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 797.251781] env[68233]: INFO nova.scheduler.client.report [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe 
tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Deleted allocations for instance c8fd5539-8add-45fe-a0ac-8767bf8a330e [ 797.425650] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ec4952-394a-7968-41fd-d6997d059434, 'name': SearchDatastore_Task, 'duration_secs': 0.015744} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.425949] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.426196] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 797.426433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.426585] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.426767] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 797.427122] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6bb9798-2003-4841-8bc3-f8436a239641 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.435947] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 797.436181] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 797.436841] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bef684af-ce1d-44a9-a06b-8e7023e7fcb9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.442442] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 797.442442] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bd491a-48cd-9706-6bde-3282c106a1b5" [ 797.442442] env[68233]: _type = "Task" [ 797.442442] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.451088] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bd491a-48cd-9706-6bde-3282c106a1b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.480314] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.652884] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782283, 'name': Rename_Task, 'duration_secs': 0.271872} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.653399] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 797.653654] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afd61931-dac4-47bf-86cc-3ff9fcd00ecf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.659915] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 797.659915] env[68233]: value = "task-2782284" [ 797.659915] env[68233]: _type = "Task" [ 797.659915] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.672611] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782284, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.689681] env[68233]: DEBUG nova.compute.manager [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Received event network-vif-plugged-a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 797.689903] env[68233]: DEBUG oslo_concurrency.lockutils [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] Acquiring lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.690664] env[68233]: DEBUG oslo_concurrency.lockutils [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.690898] env[68233]: DEBUG oslo_concurrency.lockutils [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.691115] env[68233]: DEBUG nova.compute.manager [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] No waiting events found dispatching network-vif-plugged-a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 797.691312] env[68233]: WARNING nova.compute.manager [req-83ebdbb0-19d9-4161-9572-088b46a786e3 req-7a1134bf-920c-499b-a2a8-283941dac831 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Received unexpected event network-vif-plugged-a16e0302-632b-4d26-89f7-2d608a6d75f9 for instance with vm_state building and task_state spawning. [ 797.692282] env[68233]: INFO nova.compute.manager [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Took 41.70 seconds to build instance. 
[ 797.765971] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7c0df8e-509c-401b-9599-806f4ae0a1fe tempest-AttachInterfacesV270Test-1737246151 tempest-AttachInterfacesV270Test-1737246151-project-member] Lock "c8fd5539-8add-45fe-a0ac-8767bf8a330e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.102s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.863365] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Successfully updated port: a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 797.955604] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bd491a-48cd-9706-6bde-3282c106a1b5, 'name': SearchDatastore_Task, 'duration_secs': 0.020225} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.960274] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e421f4d3-d7c7-4f7c-9cbd-e7df39106a94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.968458] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 797.968458] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3e092-7264-3a78-6593-0688d9deda22" [ 797.968458] env[68233]: _type = "Task" [ 797.968458] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.980469] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3e092-7264-3a78-6593-0688d9deda22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.176584] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782284, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.195379] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85205408-d15f-4e28-8447-6ff91685764f tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.739s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.367715] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.367715] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.367715] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 798.478476] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3014592a-430a-40fa-890a-31217731568e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.490732] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e63596-1d8c-47ab-82d3-2573609593ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.494997] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d3e092-7264-3a78-6593-0688d9deda22, 'name': SearchDatastore_Task, 'duration_secs': 0.019859} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.495096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.495296] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 798.495880] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14d36d4c-d798-459a-8a9a-2f370761939b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.527182] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3067125e-acc0-4ac6-8943-ef062f26d210 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.530505] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 798.530505] env[68233]: value = "task-2782285" [ 798.530505] env[68233]: _type = "Task" [ 798.530505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.538085] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d01f78-3d26-4101-b247-a974c30dc7ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.546144] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782285, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.557141] env[68233]: DEBUG nova.compute.provider_tree [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.670539] env[68233]: DEBUG oslo_vmware.api [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782284, 'name': PowerOnVM_Task, 'duration_secs': 0.584693} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.671019] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 798.671142] env[68233]: INFO nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Took 6.72 seconds to spawn the instance on the hypervisor. [ 798.671250] env[68233]: DEBUG nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 798.672058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1f40f2-ce5f-45f8-948c-981f2a0eac92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.702684] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.905692] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 799.040990] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782285, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.062962] env[68233]: DEBUG nova.scheduler.client.report [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.071154] env[68233]: DEBUG nova.network.neutron [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Updating instance_info_cache with network_info: [{"id": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "address": "fa:16:3e:3c:14:6d", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16e0302-63", "ovs_interfaceid": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.193874] env[68233]: INFO nova.compute.manager [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Took 40.45 seconds to build instance. [ 799.228016] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.549568] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540749} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.550446] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 799.551435] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 799.551973] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9beaa19e-395f-4364-82f8-4ba0c07a339b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.562023] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 799.562023] env[68233]: value = "task-2782286" [ 799.562023] env[68233]: _type = "Task" [ 799.562023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.571093] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.344s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.574624] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.575100] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Instance network_info: |[{"id": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "address": "fa:16:3e:3c:14:6d", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16e0302-63", "ovs_interfaceid": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 799.578019] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782286, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.578019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.985s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.578019] env[68233]: DEBUG nova.objects.instance [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lazy-loading 'resources' on Instance uuid 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 799.578019] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:14:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a16e0302-632b-4d26-89f7-2d608a6d75f9', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.586737] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.590765] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.591619] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53c6452c-667f-4dd7-b936-b3e27a6d4f85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.611314] env[68233]: INFO nova.scheduler.client.report [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Deleted allocations for instance 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd [ 799.624623] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.624623] env[68233]: value = "task-2782287" [ 799.624623] env[68233]: _type = "Task" [ 799.624623] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.634349] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782287, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.697112] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f99a44a-5895-4391-915f-f0ffe92e5eca tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.889s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.070728] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782286, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142461} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.070728] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 800.071427] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e507f9c-cd2f-463a-bc33-bdeedbf71127 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.077175] env[68233]: DEBUG nova.compute.manager [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Received event network-changed-a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 800.077175] env[68233]: DEBUG nova.compute.manager [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Refreshing instance network info cache due to event network-changed-a16e0302-632b-4d26-89f7-2d608a6d75f9. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 800.077675] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] Acquiring lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.077675] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] Acquired lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.077675] env[68233]: DEBUG nova.network.neutron [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Refreshing network info cache for port a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.098826] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 800.102954] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d39ae0a-c81b-4af9-940d-b813f0d053a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.128683] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c34139f-58e1-4aeb-b62b-0880e55eb9b2 tempest-FloatingIPsAssociationNegativeTestJSON-415825308 tempest-FloatingIPsAssociationNegativeTestJSON-415825308-project-member] Lock "6c34d7ce-7bf1-4f88-812f-adc1eb5353dd" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.671s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.133851] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 800.133851] env[68233]: value = "task-2782288" [ 800.133851] env[68233]: _type = "Task" [ 800.133851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.140549] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782287, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.148469] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782288, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.201274] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.312257] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017bd6ca-3616-4fd1-9688-9d478c210646 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.325631] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c21a507-1ff0-486d-b306-518956a981a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.360883] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9bbb3c-c1c7-4d78-8361-5b643dcc64ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.366690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.367178] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.374641] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6d39e0bb-67bf-4394-b357-2a4c23fb53af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.392669] env[68233]: DEBUG nova.compute.provider_tree [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.637894] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782287, 'name': CreateVM_Task, 'duration_secs': 0.690803} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.644548] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 800.644680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.644878] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.645265] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 800.646111] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6453b5b-6007-4e9e-a022-b78a9b743b57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.651290] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782288, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.655045] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 800.655045] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5267f11a-dcf5-f51d-da23-f30a66fca5ec" [ 800.655045] env[68233]: _type = "Task" [ 800.655045] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.664907] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5267f11a-dcf5-f51d-da23-f30a66fca5ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.736325] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.845813] env[68233]: DEBUG nova.network.neutron [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Updated VIF entry in instance network info cache for port a16e0302-632b-4d26-89f7-2d608a6d75f9. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 800.845813] env[68233]: DEBUG nova.network.neutron [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Updating instance_info_cache with network_info: [{"id": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "address": "fa:16:3e:3c:14:6d", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.180", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa16e0302-63", "ovs_interfaceid": "a16e0302-632b-4d26-89f7-2d608a6d75f9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.896725] env[68233]: DEBUG nova.scheduler.client.report [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 801.106876] env[68233]: DEBUG nova.compute.manager [None req-1c7aeaa6-dc57-456f-9964-68883aaea55e tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 801.108652] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534b998f-f7ed-4a37-b582-be685d187bf1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.148728] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782288, 'name': ReconfigVM_Task, 'duration_secs': 0.733855} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.149477] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a/175ced9c-52f6-4577-a010-8fffc2876e6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 801.149592] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d319f5f-c66f-4df3-a4ee-e199b3aed42c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.156883] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 801.156883] env[68233]: value = "task-2782289" [ 801.156883] env[68233]: _type = "Task" [ 801.156883] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.169615] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5267f11a-dcf5-f51d-da23-f30a66fca5ec, 'name': SearchDatastore_Task, 'duration_secs': 0.023441} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.173205] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.173490] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.173738] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.173880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.174153] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.174425] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782289, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.174655] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2036057f-f66f-46e6-8d96-9fbe56777465 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.185598] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.185823] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.186579] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c42da65-2a79-40d6-b318-bd7d2908c239 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.193243] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 801.193243] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52962f14-798e-c089-6f35-d393d9ec88d0" [ 801.193243] env[68233]: _type = "Task" [ 801.193243] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.203632] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52962f14-798e-c089-6f35-d393d9ec88d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.324484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "65f9fe09-97dc-4988-bae4-243d60e33be9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.324655] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.324868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "65f9fe09-97dc-4988-bae4-243d60e33be9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.325067] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.325242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 
0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.327521] env[68233]: INFO nova.compute.manager [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Terminating instance [ 801.350392] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ab8e88f-c260-4971-862f-2584f60ed69b req-88c9e4a4-604e-44cf-8618-a70fb6e42d37 service nova] Releasing lock "refresh_cache-b5e9ef73-2203-42b4-bee0-76d439ffaa17" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.405230] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.408720] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.888s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.409026] env[68233]: DEBUG nova.objects.instance [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lazy-loading 'resources' on Instance uuid dcd8cca2-b62c-44a6-9e77-f336d2d39c09 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.442776] env[68233]: INFO nova.scheduler.client.report [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Deleted allocations for instance 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb [ 801.622710] env[68233]: INFO nova.compute.manager [None req-1c7aeaa6-dc57-456f-9964-68883aaea55e tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] instance snapshotting [ 801.622710] env[68233]: DEBUG nova.objects.instance [None req-1c7aeaa6-dc57-456f-9964-68883aaea55e tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lazy-loading 'flavor' on Instance uuid 65f9fe09-97dc-4988-bae4-243d60e33be9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 801.675707] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782289, 'name': Rename_Task, 'duration_secs': 0.283679} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.675707] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.675707] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e03b73c8-77f9-4d6c-a0e0-e137466c80fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.689529] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 801.689529] env[68233]: value = "task-2782290" [ 801.689529] env[68233]: _type = "Task" [ 801.689529] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.695659] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782290, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.704632] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52962f14-798e-c089-6f35-d393d9ec88d0, 'name': SearchDatastore_Task, 'duration_secs': 0.016021} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.705514] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c2ee0bc-5c8a-47e8-91b2-5e7c80730ecd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.717021] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 801.717021] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0cdfe-6998-01cb-8e99-fbf36602cb2f" [ 801.717021] env[68233]: _type = "Task" [ 801.717021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.723907] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0cdfe-6998-01cb-8e99-fbf36602cb2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.832245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "refresh_cache-65f9fe09-97dc-4988-bae4-243d60e33be9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.832448] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquired lock "refresh_cache-65f9fe09-97dc-4988-bae4-243d60e33be9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.832637] env[68233]: DEBUG nova.network.neutron [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 801.957219] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7e04fc67-11b9-4a01-adf6-9d4a5c2eb008 tempest-ServersTestManualDisk-982215780 tempest-ServersTestManualDisk-982215780-project-member] Lock "9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.058s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.133102] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e022ab95-78f8-4b8b-a66d-3d52b52caf89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.159747] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376eb53a-839c-4ca1-965e-a69b9f3e9c66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.202159] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782290, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.227226] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c0cdfe-6998-01cb-8e99-fbf36602cb2f, 'name': SearchDatastore_Task, 'duration_secs': 0.016466} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.232058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.232364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] b5e9ef73-2203-42b4-bee0-76d439ffaa17/b5e9ef73-2203-42b4-bee0-76d439ffaa17.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 802.233534] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13f1e2bc-fb02-47db-b5e1-cf3cb37021f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.241344] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 802.241344] env[68233]: value = "task-2782291" [ 802.241344] env[68233]: _type = "Task" [ 802.241344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.253291] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782291, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.254588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "09e4644d-d845-47f4-8748-925f739863b9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.254801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.255017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "09e4644d-d845-47f4-8748-925f739863b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.255185] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.256161] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.257568] env[68233]: INFO nova.compute.manager [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Terminating instance [ 802.370656] env[68233]: DEBUG nova.network.neutron [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.533956] env[68233]: DEBUG nova.network.neutron [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.627359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a487d95-3a18-4d16-a705-c65905fcd123 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.636688] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2014f63-9d22-4d68-85d8-dd94dd6f905f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.669886] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246e696a-ed8d-4895-8cf6-e232eb51d0c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.673036] env[68233]: DEBUG nova.compute.manager [None req-1c7aeaa6-dc57-456f-9964-68883aaea55e tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance disappeared during snapshot {{(pid=68233) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 802.680764] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ecddeb-a9b7-4f78-8224-8bf699e0ba9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.696589] env[68233]: DEBUG nova.compute.provider_tree [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.711686] env[68233]: DEBUG oslo_vmware.api [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782290, 'name': PowerOnVM_Task, 'duration_secs': 0.84752} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.715176] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.715487] env[68233]: DEBUG nova.compute.manager [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 802.716665] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f341eb31-62d5-4f88-a42f-ace81c87f4af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.753445] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782291, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.763236] env[68233]: DEBUG nova.compute.manager [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.763438] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.764472] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23976a5d-faea-4b60-9424-fec92bf0435d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.773462] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.773856] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5554378d-b33f-46ce-bc87-2653856ccd77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.784048] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 802.784048] env[68233]: value = "task-2782292" [ 802.784048] env[68233]: _type = "Task" [ 802.784048] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.793397] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782292, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.863575] env[68233]: DEBUG nova.compute.manager [None req-1c7aeaa6-dc57-456f-9964-68883aaea55e tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Found 0 images (rotation: 2) {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 803.040129] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Releasing lock "refresh_cache-65f9fe09-97dc-4988-bae4-243d60e33be9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.040593] env[68233]: DEBUG nova.compute.manager [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 803.040917] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 803.041979] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6409575-87c7-4f16-85ba-3531f30c1d4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.053744] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 803.054285] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fffc3a01-3128-4e7a-916c-87aa4b0ddbb5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.063609] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 803.063609] env[68233]: value = "task-2782293" [ 803.063609] env[68233]: _type = "Task" [ 803.063609] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.077245] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782293, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.205311] env[68233]: DEBUG nova.scheduler.client.report [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.234291] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 803.252229] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782291, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.841875} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.252488] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] b5e9ef73-2203-42b4-bee0-76d439ffaa17/b5e9ef73-2203-42b4-bee0-76d439ffaa17.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 803.252700] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 803.252945] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbdbb081-5e8c-4314-a4f0-7cef712d6782 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.258837] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 803.258837] env[68233]: value = "task-2782294" [ 803.258837] env[68233]: _type = "Task" [ 803.258837] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.268853] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782294, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.293923] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782292, 'name': PowerOffVM_Task, 'duration_secs': 0.401345} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.294338] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.294541] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.295141] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-935f99c7-e385-4e6f-9a0e-87cbccd1f908 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.379332] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.379490] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.379673] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Deleting the datastore file [datastore2] 09e4644d-d845-47f4-8748-925f739863b9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.379931] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b600525-9994-45b1-adf6-644b07a2d825 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.387124] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for the task: (returnval){ [ 803.387124] env[68233]: value = "task-2782296" [ 803.387124] env[68233]: _type = "Task" 
[ 803.387124] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.395561] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782296, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.573704] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782293, 'name': PowerOffVM_Task, 'duration_secs': 0.161202} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.574081] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 803.574188] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 803.574412] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-54fccd1b-f5e4-4230-bcf7-feb1639d827f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.604416] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 803.605020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 803.605020] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Deleting the datastore file [datastore2] 65f9fe09-97dc-4988-bae4-243d60e33be9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.605195] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f502352f-9fd9-4213-9f41-4846078d4acf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.613657] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for the task: (returnval){ [ 803.613657] env[68233]: value = "task-2782298" [ 
803.613657] env[68233]: _type = "Task" [ 803.613657] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.628321] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.710407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.303s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 803.712812] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.071s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 803.719516] env[68233]: INFO nova.compute.claims [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.754939] env[68233]: INFO nova.scheduler.client.report [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted allocations for instance dcd8cca2-b62c-44a6-9e77-f336d2d39c09 [ 803.768860] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782294, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066486} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.770020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 803.770020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a5ede60-3a13-4d3a-98e8-9d130e47d3a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.794541] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Reconfiguring VM instance instance-00000037 to attach disk [datastore2] b5e9ef73-2203-42b4-bee0-76d439ffaa17/b5e9ef73-2203-42b4-bee0-76d439ffaa17.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 803.796793] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baf85ef3-5557-4af2-b8cb-0b55266887fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.817594] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 803.817594] env[68233]: value = "task-2782299" [ 803.817594] env[68233]: _type = "Task" [ 803.817594] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.826792] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782299, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.898581] env[68233]: DEBUG oslo_vmware.api [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Task: {'id': task-2782296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127805} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.898852] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.899069] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.899267] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.899464] env[68233]: INFO nova.compute.manager [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 803.899709] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.899906] env[68233]: DEBUG nova.compute.manager [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.900012] env[68233]: DEBUG nova.network.neutron [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.126855] env[68233]: DEBUG oslo_vmware.api [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Task: {'id': task-2782298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090562} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.127355] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.127570] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 804.127678] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 804.127796] env[68233]: INFO nova.compute.manager [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Took 1.09 seconds to destroy the instance on the hypervisor. [ 804.128842] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 804.128842] env[68233]: DEBUG nova.compute.manager [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 804.128842] env[68233]: DEBUG nova.network.neutron [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 804.145606] env[68233]: DEBUG nova.network.neutron [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 804.268293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73d71fc4-0731-4092-a97d-41dbdc866a56 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "dcd8cca2-b62c-44a6-9e77-f336d2d39c09" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.912s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.333584] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782299, 'name': ReconfigVM_Task, 'duration_secs': 0.288127} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.333584] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Reconfigured VM instance instance-00000037 to attach disk [datastore2] b5e9ef73-2203-42b4-bee0-76d439ffaa17/b5e9ef73-2203-42b4-bee0-76d439ffaa17.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.334224] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bc515ab-8fd2-4763-ba27-1a667cb39a6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.342145] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 804.342145] env[68233]: value = "task-2782300" [ 804.342145] env[68233]: _type = "Task" [ 804.342145] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.350247] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782300, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.359899] env[68233]: DEBUG nova.compute.manager [req-489a32b9-1ca1-41ba-a290-80bfca3fd08b req-1c0b228a-21bc-42c6-a398-e30f1a481183 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Received event network-vif-deleted-4b6356d5-3b4a-4a8f-951e-f7457bcafd9f {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 804.360034] env[68233]: INFO nova.compute.manager [req-489a32b9-1ca1-41ba-a290-80bfca3fd08b req-1c0b228a-21bc-42c6-a398-e30f1a481183 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Neutron deleted interface 4b6356d5-3b4a-4a8f-951e-f7457bcafd9f; detaching it from the instance and deleting it from the info cache [ 804.361859] env[68233]: DEBUG nova.network.neutron [req-489a32b9-1ca1-41ba-a290-80bfca3fd08b req-1c0b228a-21bc-42c6-a398-e30f1a481183 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.536338] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "175ced9c-52f6-4577-a010-8fffc2876e6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.536815] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.536815] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "175ced9c-52f6-4577-a010-8fffc2876e6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 804.537701] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.537701] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.539756] env[68233]: INFO nova.compute.manager [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Terminating instance [ 804.650099] env[68233]: DEBUG nova.network.neutron [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.783627] env[68233]: DEBUG nova.network.neutron [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.858424] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782300, 'name': Rename_Task, 'duration_secs': 0.143944} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.858671] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.859948] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20935ab4-f232-4eb7-9015-fc4136318e55 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.868482] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57fa6417-1f58-4843-ad2b-78302384e9d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.870074] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 804.870074] env[68233]: value = "task-2782301" [ 804.870074] env[68233]: _type = "Task" [ 804.870074] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.879472] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b260ffb-4331-48fa-9eb1-7d5008681fc6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.903324] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782301, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.933247] env[68233]: DEBUG nova.compute.manager [req-489a32b9-1ca1-41ba-a290-80bfca3fd08b req-1c0b228a-21bc-42c6-a398-e30f1a481183 service nova] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Detach interface failed, port_id=4b6356d5-3b4a-4a8f-951e-f7457bcafd9f, reason: Instance 09e4644d-d845-47f4-8748-925f739863b9 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 805.045463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "refresh_cache-175ced9c-52f6-4577-a010-8fffc2876e6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.046043] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquired lock "refresh_cache-175ced9c-52f6-4577-a010-8fffc2876e6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.046179] env[68233]: DEBUG nova.network.neutron [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.153434] env[68233]: INFO nova.compute.manager [-] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Took 1.02 seconds to deallocate network for instance. [ 805.288759] env[68233]: INFO nova.compute.manager [-] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Took 1.39 seconds to deallocate network for instance. [ 805.359460] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.359724] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.387225] env[68233]: DEBUG oslo_vmware.api [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782301, 'name': PowerOnVM_Task, 'duration_secs': 0.490971} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.387997] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.387997] env[68233]: INFO nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Took 8.47 seconds to spawn the instance on the hypervisor. [ 805.387997] env[68233]: DEBUG nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.388686] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f98824-25e4-4aff-b2b3-649f59970f9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.458196] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c84f5c-2b11-412c-8b7c-805ad4587082 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.469068] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7265927b-e4a4-43b4-a0f4-ebd441c276bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.501181] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96c9c3f-8721-4550-a925-4b6431c256dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.509682] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fa6065-9971-46db-bc56-8a3a0c0b19bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.523519] env[68233]: DEBUG nova.compute.provider_tree [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.589920] env[68233]: DEBUG nova.network.neutron [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 805.660595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.685103] env[68233]: DEBUG nova.network.neutron [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.797659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.867657] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "22c06baf-6316-4531-8037-b8b77c401596" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.867657] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "22c06baf-6316-4531-8037-b8b77c401596" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.910732] env[68233]: INFO nova.compute.manager [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Took 40.00 seconds to build instance. 
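The span above traces the driver's task life-cycle for this batch of instances: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task are each started through the oslo.vmware session and then polled by wait_for_task(), which is what produces the repeated "Waiting for the task: (returnval){ ... } to complete.", "progress is N%." and "completed successfully" lines (oslo_vmware/api.py:397, 434, 444). A minimal sketch of that start-and-wait pattern follows; only the oslo.vmware calls (VMwareAPISession, invoke_api, wait_for_task) are real API, while the helper names, connection values and vm_ref handling are illustrative and not taken from this deployment.

from oslo_vmware import api as vmware_api

def make_session(host, user, password):
    # Creating the session produces the "Logging into host" and
    # "Successfully established new session" messages seen earlier in this
    # log; op_id_prefix is why every SOAP request carries an
    # opID=oslo.vmware-<uuid> tag.
    return vmware_api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,
        task_poll_interval=0.5,
        op_id_prefix='oslo.vmware')

def power_off_and_wait(session, vm_ref):
    # Starting the task triggers an "Invoking VirtualMachine.PowerOffVM_Task
    # with opID=..." request_handler line; vm_ref is a managed object
    # reference obtained elsewhere (placeholder here).
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task, logging "Waiting for the task: ... to
    # complete." and "Task: {...} progress is N%." until it finishes; it
    # returns the task info on success and raises an oslo.vmware exception
    # if vCenter reports an error for the task.
    return session.wait_for_task(task)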
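Likewise, the 'Acquiring lock "compute_resources" by ...', 'Lock "compute_resources" acquired ... waited Ns' and '"released" ... held Ns' lines (lockutils.py:405/410/424) are emitted by oslo.concurrency while it serializes resource-tracker sections such as instance_claim and update_usage. A sketch of that usage is below, with a stand-in function rather than Nova's ResourceTracker; the lock name matches the trace, everything else is illustrative.

from oslo_concurrency import lockutils

# Entering and leaving the decorated function logs the same acquire
# (with time waited) and release (with time held) DEBUG messages recorded
# above, attributed to the decorated callable's qualified name.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid, requested):
    # Critical section: only one claim or usage update runs at a time per
    # process, which is why the trace shows long "waited" times (28.071s,
    # 30.404s) when many builds and deletes queue behind the same lock.
    return {'instance': instance_uuid, 'claimed': requested}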
[ 806.029621] env[68233]: DEBUG nova.scheduler.client.report [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 806.189968] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Releasing lock "refresh_cache-175ced9c-52f6-4577-a010-8fffc2876e6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.189968] env[68233]: DEBUG nova.compute.manager [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 806.189968] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 806.189968] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7347466-b39b-4ed2-846b-2648df77b318 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.200486] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 806.201595] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2fc39d7-6905-4d01-8ede-5e4641bee0c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.210314] env[68233]: DEBUG oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 806.210314] env[68233]: value = "task-2782302" [ 806.210314] env[68233]: _type = "Task" [ 806.210314] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.222042] env[68233]: DEBUG oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782302, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.306295] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01fb6b15-64f7-4a3d-a9c0-9e595cd868a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.314076] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Suspending the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 806.314348] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ebe8f54b-d236-4a11-ae7b-52c5053c8d0b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.320807] env[68233]: DEBUG oslo_vmware.api [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] Waiting for the task: (returnval){ [ 806.320807] env[68233]: value = "task-2782303" [ 806.320807] env[68233]: _type = "Task" [ 806.320807] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.330319] env[68233]: DEBUG oslo_vmware.api [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] Task: {'id': task-2782303, 'name': SuspendVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.413849] env[68233]: DEBUG oslo_concurrency.lockutils [None req-976bf5d9-fc94-46fb-9782-0c88da1b2911 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.340s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.541072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.828s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.541775] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.544644] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.404s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.546486] env[68233]: INFO nova.compute.claims [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.719965] env[68233]: DEBUG oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782302, 'name': PowerOffVM_Task, 'duration_secs': 0.153659} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.720372] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 806.720788] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 806.721160] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abf5c7f0-4e10-4ca3-94e7-787468706324 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.749745] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 806.749981] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 806.750188] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Deleting the datastore file [datastore2] 175ced9c-52f6-4577-a010-8fffc2876e6a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 806.750799] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-794f1d1e-800a-4fda-8ed1-1d8d2cd9f668 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.760047] env[68233]: DEBUG 
oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for the task: (returnval){ [ 806.760047] env[68233]: value = "task-2782305" [ 806.760047] env[68233]: _type = "Task" [ 806.760047] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.767062] env[68233]: DEBUG oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782305, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.834117] env[68233]: DEBUG oslo_vmware.api [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] Task: {'id': task-2782303, 'name': SuspendVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.917193] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 807.052129] env[68233]: DEBUG nova.compute.utils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 807.056316] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 807.056316] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 807.134419] env[68233]: DEBUG nova.policy [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b03c8b07e7414800bbc5dcd684da1cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29cbdc05b9f04210b5d5c5dd6700e276', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 807.268431] env[68233]: DEBUG oslo_vmware.api [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Task: {'id': task-2782305, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124391} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.268716] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 807.268920] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 807.269331] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 807.269331] env[68233]: INFO nova.compute.manager [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Took 1.08 seconds to destroy the instance on the hypervisor. [ 807.269650] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.269748] env[68233]: DEBUG nova.compute.manager [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 807.269836] env[68233]: DEBUG nova.network.neutron [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 807.294054] env[68233]: DEBUG nova.network.neutron [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 807.331839] env[68233]: DEBUG oslo_vmware.api [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] Task: {'id': task-2782303, 'name': SuspendVM_Task, 'duration_secs': 0.643334} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.332182] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Suspended the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 807.333722] env[68233]: DEBUG nova.compute.manager [None req-18a12c3d-faca-443b-9938-ddabe0ceee08 tempest-ServersAdminNegativeTestJSON-993104784 tempest-ServersAdminNegativeTestJSON-993104784-project-admin] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.333722] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54174415-e499-43f4-a0e8-636998dded9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.446197] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.455244] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Successfully created port: 4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 807.566896] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.800669] env[68233]: DEBUG nova.network.neutron [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.177707] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adcd87b7-b36e-4ef2-b7e9-649f9d249a82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.185742] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b306dda-08ee-4930-9130-1f6f91cc5b69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.217453] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe86b78-8784-4377-9549-3b3fa9c3f0b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.225221] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef7c49d-0e74-4fc8-b767-fda6e86ee0ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.240771] env[68233]: DEBUG nova.compute.provider_tree [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.308060] env[68233]: INFO nova.compute.manager [-] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Took 1.04 seconds to deallocate network for instance. [ 808.584087] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.610311] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.610956] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.611183] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.611410] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.611602] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.611826] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.612098] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.612309] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.612523] env[68233]: DEBUG 
nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.612733] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.612955] env[68233]: DEBUG nova.virt.hardware [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.613896] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65389f9e-4ab2-4fba-8459-1ec26df20fe4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.623041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613939a1-db72-4f77-a74f-d8d254adf48e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.745011] env[68233]: DEBUG nova.scheduler.client.report [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.817792] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.900768] env[68233]: DEBUG nova.compute.manager [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 req-f4294b8d-4334-494e-9294-e25969c87798 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Received event network-vif-plugged-4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 808.900928] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 req-f4294b8d-4334-494e-9294-e25969c87798 service nova] Acquiring lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.901073] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 
req-f4294b8d-4334-494e-9294-e25969c87798 service nova] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.901248] env[68233]: DEBUG oslo_concurrency.lockutils [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 req-f4294b8d-4334-494e-9294-e25969c87798 service nova] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.901348] env[68233]: DEBUG nova.compute.manager [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 req-f4294b8d-4334-494e-9294-e25969c87798 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] No waiting events found dispatching network-vif-plugged-4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 808.901517] env[68233]: WARNING nova.compute.manager [req-bc7e4f6c-2bec-4548-adb8-03b171d1f357 req-f4294b8d-4334-494e-9294-e25969c87798 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Received unexpected event network-vif-plugged-4de9e0fc-a208-4d4d-a1db-73da575af588 for instance with vm_state building and task_state spawning. [ 809.011483] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Successfully updated port: 4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 809.117343] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.118021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.254753] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.710s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.255129] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 809.257758] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.985s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.258089] env[68233]: DEBUG nova.objects.instance [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'resources' on Instance uuid 2a88648c-f00d-4d7b-905d-e70c327e248a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.517882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.517986] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquired lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 809.518118] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 809.764619] env[68233]: DEBUG nova.compute.utils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 809.768062] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 809.768062] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.822372] env[68233]: DEBUG nova.policy [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77a0fdbc15849c6b1cf9518052a745d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ebf78d36f9e42eca135e60dc7dcc8c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 810.073160] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 810.185059] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Successfully created port: 6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.240233] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4677d047-f8dc-4501-be9b-14e6a2222f46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.240468] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.272320] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 810.323175] env[68233]: DEBUG nova.network.neutron [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Updating instance_info_cache with network_info: [{"id": "4de9e0fc-a208-4d4d-a1db-73da575af588", "address": "fa:16:3e:4a:7e:85", "network": {"id": "b34ddc42-3466-4921-b888-945fd29a3689", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-45488879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29cbdc05b9f04210b5d5c5dd6700e276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4de9e0fc-a2", "ovs_interfaceid": "4de9e0fc-a208-4d4d-a1db-73da575af588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.342028] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148e6228-cfaf-4b27-afe7-57cdb63ce35e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.349367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0241ef1e-cf59-4ae9-82d0-1a13be99362a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.380849] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce22877a-cfff-4dee-aa35-c56800bb6f96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.388590] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6748fd4d-d01a-44ac-8cc4-a91041c68137 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.403488] env[68233]: DEBUG nova.compute.provider_tree [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.420158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.420158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.420158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.420158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.420158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.421045] env[68233]: INFO nova.compute.manager [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Terminating instance [ 810.825184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Releasing lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 810.825523] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Instance network_info: |[{"id": "4de9e0fc-a208-4d4d-a1db-73da575af588", "address": "fa:16:3e:4a:7e:85", "network": {"id": "b34ddc42-3466-4921-b888-945fd29a3689", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-45488879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"29cbdc05b9f04210b5d5c5dd6700e276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4de9e0fc-a2", "ovs_interfaceid": "4de9e0fc-a208-4d4d-a1db-73da575af588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 810.826171] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:7e:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19440099-773e-4a31-b82e-84a4daa5d8fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4de9e0fc-a208-4d4d-a1db-73da575af588', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 810.833395] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Creating folder: Project (29cbdc05b9f04210b5d5c5dd6700e276). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.834431] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83ec9ffa-af4b-4c41-8c42-c9f86c4b4ef3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.845390] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Created folder: Project (29cbdc05b9f04210b5d5c5dd6700e276) in parent group-v559223. [ 810.845569] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Creating folder: Instances. Parent ref: group-v559377. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 810.845782] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af819920-9d00-40f8-8f55-cea9ff78b6be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.854311] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Created folder: Instances in parent group-v559377. [ 810.854534] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.854712] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 810.854923] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-744e586f-3a48-4e3e-a6b6-894876b46faa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.872733] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 810.872733] env[68233]: value = "task-2782308" [ 810.872733] env[68233]: _type = "Task" [ 810.872733] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.880130] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782308, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.906690] env[68233]: DEBUG nova.scheduler.client.report [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 810.924606] env[68233]: DEBUG nova.compute.manager [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 810.924771] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 810.925613] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf65714-3ccd-4fd4-acaf-756ea7a7f2f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.934441] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 810.934710] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0bd472b-dd77-48ee-a48e-c16c31ea318b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.938392] env[68233]: DEBUG nova.compute.manager [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Received event network-changed-4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 810.938578] env[68233]: DEBUG nova.compute.manager [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Refreshing instance network info cache due to event network-changed-4de9e0fc-a208-4d4d-a1db-73da575af588. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 810.938811] env[68233]: DEBUG oslo_concurrency.lockutils [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] Acquiring lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.938964] env[68233]: DEBUG oslo_concurrency.lockutils [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] Acquired lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.939140] env[68233]: DEBUG nova.network.neutron [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Refreshing network info cache for port 4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 811.015365] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 811.018110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 811.018290] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleting the datastore file [datastore2] b5e9ef73-2203-42b4-bee0-76d439ffaa17 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.018771] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2888266-778d-40f5-9f3b-d61e09817255 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.025216] env[68233]: DEBUG oslo_vmware.api [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 811.025216] env[68233]: value = "task-2782310" [ 811.025216] env[68233]: _type = "Task" [ 811.025216] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.033099] env[68233]: DEBUG oslo_vmware.api [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782310, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.282555] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 811.308987] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 811.309134] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.309304] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 811.309495] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.309643] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 811.309790] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 811.310009] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 811.310182] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 811.310349] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 811.310512] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 811.310683] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 811.311588] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a88630f-d57b-47f0-9a4f-3579f48bbdcf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.319754] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27766e6a-d199-4afc-86ae-2e42b2462411 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.382379] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782308, 'name': CreateVM_Task, 'duration_secs': 0.343028} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.382562] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 811.383228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.383396] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.383724] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 811.383970] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f43dc498-d78e-4b95-9982-e882948a113e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.388021] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 811.388021] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7ed14-d18c-48f0-e4a5-377386fd2520" [ 811.388021] env[68233]: _type = "Task" [ 811.388021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.395238] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7ed14-d18c-48f0-e4a5-377386fd2520, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.412144] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.154s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.414127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.226s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.415539] env[68233]: INFO nova.compute.claims [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.433092] env[68233]: INFO nova.scheduler.client.report [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted allocations for instance 2a88648c-f00d-4d7b-905d-e70c327e248a [ 811.534566] env[68233]: DEBUG oslo_vmware.api [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16793} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.537013] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.537247] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.537432] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.537605] env[68233]: INFO nova.compute.manager [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Took 0.61 seconds to destroy the instance on the hypervisor. 
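The lock lines in the entries above (for example "compute_resources" released after being held 2.154s and then re-acquired after a 30.226s wait, and the per-image-cache lock around the datastore search) come from oslo.concurrency's lockutils, which the log cites directly (lockutils.py:313/316/334 and 405/410/424). As a minimal illustrative sketch only, not code from this deployment, and with hypothetical function names, the two forms that produce these DEBUG lines look roughly like this:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialise on the named lock; the wrapper logs the
    # "Acquiring lock ...", "acquired ... :: waited Ns" and
    # "released ... :: held Ns" DEBUG lines seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def update_usage():              # hypothetical name, for illustration only
        pass                         # placeholder: mutate tracked resources here

    # Context-manager form, as used for the per-instance refresh_cache locks:
    def refresh_cache(instance_uuid):    # hypothetical helper
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass                     # placeholder: rebuild the network info cache
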
[ 811.537845] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 811.538057] env[68233]: DEBUG nova.compute.manager [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 811.538153] env[68233]: DEBUG nova.network.neutron [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 811.747160] env[68233]: DEBUG nova.network.neutron [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Updated VIF entry in instance network info cache for port 4de9e0fc-a208-4d4d-a1db-73da575af588. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 811.747458] env[68233]: DEBUG nova.network.neutron [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Updating instance_info_cache with network_info: [{"id": "4de9e0fc-a208-4d4d-a1db-73da575af588", "address": "fa:16:3e:4a:7e:85", "network": {"id": "b34ddc42-3466-4921-b888-945fd29a3689", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-45488879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "29cbdc05b9f04210b5d5c5dd6700e276", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4de9e0fc-a2", "ovs_interfaceid": "4de9e0fc-a208-4d4d-a1db-73da575af588", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.814413] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Successfully updated port: 6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.899117] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7ed14-d18c-48f0-e4a5-377386fd2520, 'name': SearchDatastore_Task, 'duration_secs': 
0.00915} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.900291] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 811.900534] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 811.900787] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.900948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.901153] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 811.903137] env[68233]: DEBUG nova.compute.manager [req-4645ef68-8a05-43c5-ad51-4febdb57ec74 req-6d806930-ee07-47be-b1f3-1cb0dc4a5c89 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Received event network-vif-deleted-a16e0302-632b-4d26-89f7-2d608a6d75f9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 811.903137] env[68233]: INFO nova.compute.manager [req-4645ef68-8a05-43c5-ad51-4febdb57ec74 req-6d806930-ee07-47be-b1f3-1cb0dc4a5c89 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Neutron deleted interface a16e0302-632b-4d26-89f7-2d608a6d75f9; detaching it from the instance and deleting it from the info cache [ 811.903137] env[68233]: DEBUG nova.network.neutron [req-4645ef68-8a05-43c5-ad51-4febdb57ec74 req-6d806930-ee07-47be-b1f3-1cb0dc4a5c89 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.904789] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0bcdf00-1409-41c0-b282-bd8b9cee44f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.915725] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 811.915725] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 811.915725] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5270259c-5d6a-45ad-95d2-2c25891090bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.922518] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 811.922518] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52837e28-d809-dfaa-5560-16e57a0334dc" [ 811.922518] env[68233]: _type = "Task" [ 811.922518] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.931951] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52837e28-d809-dfaa-5560-16e57a0334dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.940127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c510f4ab-67fe-4e26-a352-2a4c3d042905 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "2a88648c-f00d-4d7b-905d-e70c327e248a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.761s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.251084] env[68233]: DEBUG oslo_concurrency.lockutils [req-0bfbefa0-6321-457a-b21d-2e92d2015dd5 req-497d2f7f-0f9d-4da0-9be9-234ef5407a77 service nova] Releasing lock "refresh_cache-28af332b-4f9b-4474-afdc-ab17e92df6e7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.322257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.322567] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.322567] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 812.381425] env[68233]: DEBUG nova.network.neutron [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.408478] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-646d7794-7cb7-4ebb-8ec2-8e8e815657b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.418418] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bade7c6e-e289-42a0-9b38-2247bc796be1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.457956] env[68233]: DEBUG nova.compute.manager [req-4645ef68-8a05-43c5-ad51-4febdb57ec74 req-6d806930-ee07-47be-b1f3-1cb0dc4a5c89 service nova] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Detach interface failed, port_id=a16e0302-632b-4d26-89f7-2d608a6d75f9, reason: Instance b5e9ef73-2203-42b4-bee0-76d439ffaa17 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 812.463574] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52837e28-d809-dfaa-5560-16e57a0334dc, 'name': SearchDatastore_Task, 'duration_secs': 0.007971} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.464881] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2ea952b-8c42-428f-9427-a2cc94627578 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.470206] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 812.470206] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52290947-8d03-a8e8-b61e-11231092a871" [ 812.470206] env[68233]: _type = "Task" [ 812.470206] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.478934] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52290947-8d03-a8e8-b61e-11231092a871, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.884512] env[68233]: INFO nova.compute.manager [-] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Took 1.35 seconds to deallocate network for instance. [ 812.892239] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.906036] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d42ea4-16e7-4ac1-82d2-26de222f57ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.913312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c880c857-08ac-48c2-b0b4-938cfd7c5a34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.951161] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2043664-543c-4635-bd4e-1198526d4917 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.960412] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4efc4e-7a6a-4c25-89dd-440d1b1313f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.973894] env[68233]: DEBUG nova.compute.provider_tree [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.983018] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52290947-8d03-a8e8-b61e-11231092a871, 'name': SearchDatastore_Task, 'duration_secs': 0.024152} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.983849] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.984121] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 28af332b-4f9b-4474-afdc-ab17e92df6e7/28af332b-4f9b-4474-afdc-ab17e92df6e7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 812.984366] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c293e5a3-8242-4924-9dd6-3a6b9bedb7e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.990451] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 812.990451] env[68233]: value = "task-2782311" [ 812.990451] env[68233]: _type = "Task" [ 812.990451] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.001022] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782311, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.017501] env[68233]: DEBUG nova.compute.manager [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Received event network-vif-plugged-6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 813.017725] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Acquiring lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.017930] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.018108] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.018275] env[68233]: DEBUG nova.compute.manager [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] No waiting events found dispatching network-vif-plugged-6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 813.018436] env[68233]: WARNING nova.compute.manager [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Received unexpected event network-vif-plugged-6d3afe70-2479-4ac4-a335-e5a1f04778a7 for instance with vm_state building and task_state spawning. [ 813.018591] env[68233]: DEBUG nova.compute.manager [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Received event network-changed-6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 813.018740] env[68233]: DEBUG nova.compute.manager [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Refreshing instance network info cache due to event network-changed-6d3afe70-2479-4ac4-a335-e5a1f04778a7. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 813.018901] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Acquiring lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.145505] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Updating instance_info_cache with network_info: [{"id": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "address": "fa:16:3e:e7:80:1b", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3afe70-24", "ovs_interfaceid": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.395710] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.480068] env[68233]: DEBUG nova.scheduler.client.report [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.501161] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782311, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447521} completed successfully. 
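The scheduler report-client entry above re-states the compute node's inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c. A short sketch of what those numbers mean for scheduling, assuming Placement's usual capacity rule capacity = (total - reserved) * allocation_ratio; the figures are copied from the log, and usable_capacity is an illustrative helper, not Placement code.

    # Inventory as reported for provider 51aa13e7-0977-4031-b209-4ae90c83752c.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        # Assumed convention: capacity = (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}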
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.501405] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 28af332b-4f9b-4474-afdc-ab17e92df6e7/28af332b-4f9b-4474-afdc-ab17e92df6e7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 813.501614] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 813.501846] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a286d45e-c11a-4efc-9cc2-ef8c58bf06af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.507963] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 813.507963] env[68233]: value = "task-2782312" [ 813.507963] env[68233]: _type = "Task" [ 813.507963] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.515038] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782312, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.648475] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.648811] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Instance network_info: |[{"id": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "address": "fa:16:3e:e7:80:1b", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3afe70-24", "ovs_interfaceid": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 813.649154] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Acquired lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.649347] env[68233]: DEBUG nova.network.neutron [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Refreshing network info cache for port 6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.650492] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:80:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d3afe70-2479-4ac4-a335-e5a1f04778a7', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.657820] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 
tempest-MultipleCreateTestJSON-1316369434-project-member] Creating folder: Project (2ebf78d36f9e42eca135e60dc7dcc8c2). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.660767] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5f3a0051-8bc7-4296-99cc-47cb83b57b9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.671341] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created folder: Project (2ebf78d36f9e42eca135e60dc7dcc8c2) in parent group-v559223. [ 813.671463] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Creating folder: Instances. Parent ref: group-v559380. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 813.671681] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d911cd5f-e596-485d-9e3c-2ad988ab9d53 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.680657] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created folder: Instances in parent group-v559380. [ 813.680901] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.681097] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.681291] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6aff5f0-caa8-4583-9447-9c07d14a9422 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.703017] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.703017] env[68233]: value = "task-2782315" [ 813.703017] env[68233]: _type = "Task" [ 813.703017] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.713504] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782315, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.904703] env[68233]: DEBUG nova.network.neutron [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Updated VIF entry in instance network info cache for port 6d3afe70-2479-4ac4-a335-e5a1f04778a7. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.904703] env[68233]: DEBUG nova.network.neutron [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Updating instance_info_cache with network_info: [{"id": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "address": "fa:16:3e:e7:80:1b", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d3afe70-24", "ovs_interfaceid": "6d3afe70-2479-4ac4-a335-e5a1f04778a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.984621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.985193] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Start building networks asynchronously for instance. 
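Both instance_info_cache updates above embed the full network_info blob for port 6d3afe70-2479-4ac4-a335-e5a1f04778a7. The sketch below is a small, illustrative reader for that structure, pulling out the fields referenced elsewhere in the log (port id, MAC, fixed IP, MTU, devname); the dict is an abbreviated copy of the cached entry, and summarize_vif is a hypothetical helper, not Nova code.

    # Abbreviated copy of the network_info entry recorded in the cache above.
    vif = {
        "id": "6d3afe70-2479-4ac4-a335-e5a1f04778a7",
        "address": "fa:16:3e:e7:80:1b",
        "network": {
            "id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.14", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
        "type": "ovs",
        "devname": "tap6d3afe70-24",
        "active": True,
    }

    def summarize_vif(v):
        ips = [ip["address"]
               for subnet in v["network"]["subnets"]
               for ip in subnet["ips"]]
        return {
            "port": v["id"],
            "mac": v["address"],
            "ips": ips,
            "mtu": v["network"]["meta"].get("mtu"),
            "devname": v["devname"],
        }

    print(summarize_vif(vif))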
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 813.988378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.092s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.988469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.988619] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 813.988901] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 31.031s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.991053] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c8bcb1-b099-4655-b3b4-ef78065b37e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.998957] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e095b647-9229-4413-9aff-401eb94eaced {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.016382] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fd91671-66d2-491d-aefa-69c07d44d50f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.024608] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059409} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.026527] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 814.027561] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df0ec98-9c0a-4331-9776-4d594884be41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.030576] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b11c618-bd27-44ff-b6d2-05b351bc8a70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.055330] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 28af332b-4f9b-4474-afdc-ab17e92df6e7/28af332b-4f9b-4474-afdc-ab17e92df6e7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 814.081383] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-189621b4-9423-4534-94d2-de363e502bdb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.096380] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178573MB free_disk=174GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 814.096570] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.101620] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 814.101620] env[68233]: value = "task-2782316" [ 814.101620] env[68233]: _type = "Task" [ 814.101620] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.108939] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782316, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.213425] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782315, 'name': CreateVM_Task, 'duration_secs': 0.303452} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.213612] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.214398] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.214605] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.215014] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.215301] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d574b1f-2c5c-4ab4-978a-f976f9e9016c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.220203] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 814.220203] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525efed3-4518-23a7-9a0c-0f69f7df61e6" [ 814.220203] env[68233]: _type = "Task" [ 814.220203] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.228203] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525efed3-4518-23a7-9a0c-0f69f7df61e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.407472] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0105bda-b8cc-40a6-922f-d261a5ca8596 req-e1c4da5f-69e7-4fa2-8071-e6c9dcceeeb5 service nova] Releasing lock "refresh_cache-64b8997c-3246-4c97-a6c9-3a6a23645d38" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.492649] env[68233]: DEBUG nova.compute.utils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.494134] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.494308] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.498191] env[68233]: INFO nova.compute.claims [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.550592] env[68233]: DEBUG nova.policy [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77a0fdbc15849c6b1cf9518052a745d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ebf78d36f9e42eca135e60dc7dcc8c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.610572] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782316, 'name': ReconfigVM_Task, 'duration_secs': 0.274138} completed successfully. 
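The constant "Acquiring/Acquired/Releasing lock" chatter in this run, including the image-cache VMDK lock released just above, comes from oslo_concurrency.lockutils. A minimal sketch of its two usual entry points, the lock() context manager and the synchronized() decorator; the lock names mirror ones seen in the log, and the guarded bodies are placeholders rather than the real Nova call sites.

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the cached image VMDK above.
    def copy_cached_image(image_id):
        with lockutils.lock(f"[datastore2] devstack-image-cache_base/{image_id}"):
            pass  # placeholder for the CopyVirtualDisk call

    # Decorator form, as used for the resource tracker's "compute_resources" lock.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # placeholder for ResourceTracker.update_usage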
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.610843] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 28af332b-4f9b-4474-afdc-ab17e92df6e7/28af332b-4f9b-4474-afdc-ab17e92df6e7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 814.611472] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c576691c-2836-4f72-86aa-b76dbab7038d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.616905] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 814.616905] env[68233]: value = "task-2782317" [ 814.616905] env[68233]: _type = "Task" [ 814.616905] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.624118] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782317, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.731838] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525efed3-4518-23a7-9a0c-0f69f7df61e6, 'name': SearchDatastore_Task, 'duration_secs': 0.008909} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.732171] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.732406] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.732640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.732786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.732964] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.733327] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ba0b409-466a-4525-9c16-364ee21b4b1d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.741719] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.742301] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.742658] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c09217-1768-4b54-b10a-f39cafd2bdcb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.747360] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 814.747360] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52278e90-d58a-904f-8791-fe981393f50c" [ 814.747360] env[68233]: _type = "Task" [ 814.747360] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.759997] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52278e90-d58a-904f-8791-fe981393f50c, 'name': SearchDatastore_Task, 'duration_secs': 0.008837} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.766929] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5723a12-9ae2-4d9d-9d00-0f2cc07ed858 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.774525] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 814.774525] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d40c86-3988-3285-bd65-895adcc3cf8b" [ 814.774525] env[68233]: _type = "Task" [ 814.774525] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.787908] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d40c86-3988-3285-bd65-895adcc3cf8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.801915] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Successfully created port: 4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.002842] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.014202] env[68233]: INFO nova.compute.resource_tracker [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating resource usage from migration c46ea15e-6075-47b3-b44f-d79f032a7b76 [ 815.127042] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782317, 'name': Rename_Task, 'duration_secs': 0.144533} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.127326] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 815.127835] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31e08997-a554-4af8-a48d-b6e1307d8fa4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.133895] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 815.133895] env[68233]: value = "task-2782318" [ 815.133895] env[68233]: _type = "Task" [ 815.133895] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.144092] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782318, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.287180] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d40c86-3988-3285-bd65-895adcc3cf8b, 'name': SearchDatastore_Task, 'duration_secs': 0.039557} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.287879] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.287879] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 64b8997c-3246-4c97-a6c9-3a6a23645d38/64b8997c-3246-4c97-a6c9-3a6a23645d38.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.288174] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7111c57b-7848-4e24-aa96-19e16f403a6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.294960] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 815.294960] env[68233]: value = "task-2782319" [ 815.294960] env[68233]: _type = "Task" [ 815.294960] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.306616] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.600800] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9768c93-7c49-400f-bf11-cdbebff9ee3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.610616] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a0ecc2-5ad2-4660-a4d1-ed770b710879 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.651086] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58d97e0-497b-4ba7-b6f3-b4b76224a068 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.662634] env[68233]: DEBUG oslo_vmware.api [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782318, 'name': PowerOnVM_Task, 'duration_secs': 0.500741} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.664009] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a19a38c-1c88-47ae-a043-2998bb0e46aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.669438] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 815.669438] env[68233]: INFO nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Took 7.09 seconds to spawn the instance on the hypervisor. [ 815.669438] env[68233]: DEBUG nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 815.670242] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558786e4-1132-467a-8283-ec1e31c6f210 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.684272] env[68233]: DEBUG nova.compute.provider_tree [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.804842] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466132} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.805123] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 64b8997c-3246-4c97-a6c9-3a6a23645d38/64b8997c-3246-4c97-a6c9-3a6a23645d38.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 815.805378] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.805631] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4411091f-2d35-434d-a4d7-ce719c70cb70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.812258] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 815.812258] env[68233]: value = "task-2782320" [ 815.812258] env[68233]: _type = "Task" [ 815.812258] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.819618] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782320, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.024762] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.050599] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.050920] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.051104] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.051297] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.051446] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.051594] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.051907] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.052270] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 816.052492] env[68233]: DEBUG nova.virt.hardware [None 
req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.052675] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.052864] env[68233]: DEBUG nova.virt.hardware [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.053771] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34397b4-337f-44d0-9287-3b4845cf50f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.062475] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74cbca6-5b72-4302-affc-195db89d454d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.192116] env[68233]: DEBUG nova.scheduler.client.report [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.205055] env[68233]: INFO nova.compute.manager [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Took 40.58 seconds to build instance. [ 816.324806] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782320, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076835} completed successfully. 
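The nova.virt.hardware entries above walk from flavor/image limits ("Flavor limits 0:0:0", "limits were sockets=65536, cores=65536, threads=65536") to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a single vCPU. The sketch below is a stripped-down illustration of that enumeration, not the actual _get_possible_cpu_topologies code: list every (sockets, cores, threads) factorisation of the vCPU count that respects the per-dimension maximums (65536 here, i.e. effectively unlimited).

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // sockets // cores
                if threads <= max_threads:
                    found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [VirtCPUTopology(sockets=1, cores=1, threads=1)], as in the log
    print(possible_topologies(4))   # several factorisations: 1x4x1, 2x2x1, 4x1x1, ...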
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.328022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.328022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766011b2-fb1a-4d75-891a-89d2b7d7dbfd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.358157] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 64b8997c-3246-4c97-a6c9-3a6a23645d38/64b8997c-3246-4c97-a6c9-3a6a23645d38.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.358476] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-354d59c2-308b-4f60-b736-92e23d9ac9f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.379117] env[68233]: DEBUG nova.compute.manager [req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Received event network-vif-plugged-4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 816.379413] env[68233]: DEBUG oslo_concurrency.lockutils [req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] Acquiring lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.379573] env[68233]: DEBUG oslo_concurrency.lockutils [req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.379778] env[68233]: DEBUG oslo_concurrency.lockutils [req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.379907] env[68233]: DEBUG nova.compute.manager [req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] No waiting events found dispatching network-vif-plugged-4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 816.380307] env[68233]: WARNING nova.compute.manager 
[req-8a42187c-ac16-49b1-bbac-9b4d5ea06ff9 req-51e55e38-b1c9-4190-98a2-83e52f5019a1 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Received unexpected event network-vif-plugged-4971febd-c50e-4e4b-88f7-18e159b3e7cb for instance with vm_state building and task_state spawning. [ 816.382555] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 816.382555] env[68233]: value = "task-2782321" [ 816.382555] env[68233]: _type = "Task" [ 816.382555] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.392485] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782321, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.483047] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Successfully updated port: 4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 816.701120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.712s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.701381] env[68233]: INFO nova.compute.manager [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Migrating [ 816.707843] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.937s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.709662] env[68233]: INFO nova.compute.claims [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.712292] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c4f2a12c-958d-4eff-972f-1f36992006c2 tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.380s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.893085] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 
tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782321, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.986054] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.986054] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.986217] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.142298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.142569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.142786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.142967] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.143181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.146785] env[68233]: INFO nova.compute.manager [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Terminating instance [ 817.220584] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.220685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.220945] env[68233]: DEBUG nova.network.neutron [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.222250] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 817.394774] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782321, 'name': ReconfigVM_Task, 'duration_secs': 0.748458} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.395148] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 64b8997c-3246-4c97-a6c9-3a6a23645d38/64b8997c-3246-4c97-a6c9-3a6a23645d38.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.395827] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-356d5485-7d21-4327-b4e8-ced24f6cb874 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.402673] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 817.402673] env[68233]: value = "task-2782322" [ 817.402673] env[68233]: _type = "Task" [ 817.402673] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.410957] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782322, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.541043] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 817.649523] env[68233]: DEBUG nova.compute.manager [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.649771] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.650669] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedb698b-c0e9-49c9-8866-cacb689afbdb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.658610] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.658840] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d82c6155-6b44-485f-b039-9bb36e13447e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.664727] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 817.664727] env[68233]: value = "task-2782323" [ 817.664727] env[68233]: _type = "Task" [ 817.664727] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.672609] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.747733] env[68233]: DEBUG nova.network.neutron [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Updating instance_info_cache with network_info: [{"id": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "address": "fa:16:3e:fe:4e:5a", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4971febd-c5", "ovs_interfaceid": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.756751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.911583] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782322, 'name': Rename_Task, 'duration_secs': 0.12931} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.912063] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.912168] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23023396-6b40-4b5d-addd-40afe582e39e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.920472] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 817.920472] env[68233]: value = "task-2782324" [ 817.920472] env[68233]: _type = "Task" [ 817.920472] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.929773] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782324, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.952824] env[68233]: DEBUG nova.network.neutron [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.175890] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782323, 'name': PowerOffVM_Task, 'duration_secs': 0.203965} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.178576] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 818.178825] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 818.179329] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-586944a1-8bc3-4274-bbe6-4b08318f1fd0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.241607] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d7d2d99-3f30-479c-b819-5d4fd7ccaa66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.246125] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 818.246349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 818.246533] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Deleting the datastore file [datastore2] 28af332b-4f9b-4474-afdc-ab17e92df6e7 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 818.246771] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b986df3f-48d4-46e3-b32c-b934ba92cc71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.251551] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66228f96-7f33-4a79-bae7-a7186c23de56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.255575] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for the task: (returnval){ [ 818.255575] env[68233]: value = "task-2782326" [ 818.255575] env[68233]: _type = "Task" [ 818.255575] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.258914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.259230] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Instance network_info: |[{"id": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "address": "fa:16:3e:fe:4e:5a", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4971febd-c5", "ovs_interfaceid": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 818.259601] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:4e:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4971febd-c50e-4e4b-88f7-18e159b3e7cb', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 818.268397] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.292713] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 818.296152] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5bef48a-662f-4158-944a-0c9b9252f832 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.315566] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52306ef-3f57-44f4-a3c0-559e629131d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.319811] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.327520] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04769fd0-6c11-4d72-b11d-e8a57414989f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.331266] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 818.331266] env[68233]: value = "task-2782327" [ 818.331266] env[68233]: _type = "Task" [ 818.331266] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.342319] env[68233]: DEBUG nova.compute.provider_tree [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.348518] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782327, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.408532] env[68233]: DEBUG nova.compute.manager [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Received event network-changed-4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 818.408916] env[68233]: DEBUG nova.compute.manager [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Refreshing instance network info cache due to event network-changed-4971febd-c50e-4e4b-88f7-18e159b3e7cb. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 818.409283] env[68233]: DEBUG oslo_concurrency.lockutils [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] Acquiring lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.409543] env[68233]: DEBUG oslo_concurrency.lockutils [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] Acquired lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.409839] env[68233]: DEBUG nova.network.neutron [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Refreshing network info cache for port 4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.431100] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782324, 'name': PowerOnVM_Task, 'duration_secs': 0.437951} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.431321] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.431512] env[68233]: INFO nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Took 7.15 seconds to spawn the instance on the hypervisor. [ 818.431692] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.432496] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1294b793-ad21-40e3-b1e1-700ec2b3b4eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.455184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 818.767733] env[68233]: DEBUG oslo_vmware.api [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Task: {'id': task-2782326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193231} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.768084] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 818.768318] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 818.768534] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 818.768737] env[68233]: INFO nova.compute.manager [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Took 1.12 seconds to destroy the instance on the hypervisor. [ 818.769016] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 818.769209] env[68233]: DEBUG nova.compute.manager [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 818.769293] env[68233]: DEBUG nova.network.neutron [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 818.840851] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782327, 'name': CreateVM_Task, 'duration_secs': 0.335446} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.841043] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 818.841702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.841870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.842210] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 818.842456] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24571639-fd11-4d06-9442-cfaa1ac38405 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.845907] env[68233]: DEBUG nova.scheduler.client.report [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.850204] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 818.850204] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d50a1e-f237-c9d4-0610-5a73f493bfba" [ 818.850204] env[68233]: _type = "Task" [ 818.850204] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.859336] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d50a1e-f237-c9d4-0610-5a73f493bfba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.949417] env[68233]: INFO nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Took 42.83 seconds to build instance. [ 819.103523] env[68233]: DEBUG nova.compute.manager [req-0f844098-e93f-4892-b95b-09ab82ae2e73 req-3e5e97bb-0ad9-4344-819a-8ab2c6f3dc2f service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Received event network-vif-deleted-4de9e0fc-a208-4d4d-a1db-73da575af588 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 819.103723] env[68233]: INFO nova.compute.manager [req-0f844098-e93f-4892-b95b-09ab82ae2e73 req-3e5e97bb-0ad9-4344-819a-8ab2c6f3dc2f service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Neutron deleted interface 4de9e0fc-a208-4d4d-a1db-73da575af588; detaching it from the instance and deleting it from the info cache [ 819.104105] env[68233]: DEBUG nova.network.neutron [req-0f844098-e93f-4892-b95b-09ab82ae2e73 req-3e5e97bb-0ad9-4344-819a-8ab2c6f3dc2f service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.235123] env[68233]: DEBUG nova.network.neutron [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Updated VIF entry in instance network info cache for port 4971febd-c50e-4e4b-88f7-18e159b3e7cb. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 819.235123] env[68233]: DEBUG nova.network.neutron [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Updating instance_info_cache with network_info: [{"id": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "address": "fa:16:3e:fe:4e:5a", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4971febd-c5", "ovs_interfaceid": "4971febd-c50e-4e4b-88f7-18e159b3e7cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.355031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.647s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.355782] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.358081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.042s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.359831] env[68233]: INFO nova.compute.claims [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.369042] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d50a1e-f237-c9d4-0610-5a73f493bfba, 'name': SearchDatastore_Task, 'duration_secs': 0.011832} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.369331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.369552] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 819.369784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.369963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.370117] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 819.370367] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aa19a88-e779-4c08-a9fe-9e8878578add {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.379279] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 819.379864] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 819.380206] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3f77f7c-964c-4d03-95a1-1b991a6bbe67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.385375] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 819.385375] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1d8d9-455c-9057-9864-c2e9061923a2" [ 819.385375] env[68233]: _type = "Task" [ 819.385375] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.392823] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1d8d9-455c-9057-9864-c2e9061923a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.452206] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.925s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.525113] env[68233]: DEBUG nova.network.neutron [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.606497] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04f70247-5a6c-45ac-9ecf-4f8737da2a94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.617742] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845fd566-644f-4be5-91b2-ba0bdaaff8f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.651633] env[68233]: DEBUG nova.compute.manager [req-0f844098-e93f-4892-b95b-09ab82ae2e73 req-3e5e97bb-0ad9-4344-819a-8ab2c6f3dc2f service nova] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Detach interface failed, port_id=4de9e0fc-a208-4d4d-a1db-73da575af588, reason: Instance 28af332b-4f9b-4474-afdc-ab17e92df6e7 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 819.737294] env[68233]: DEBUG oslo_concurrency.lockutils [req-d54fed36-7807-4f4e-beaf-fa7a3d7c062c req-74fa166e-b4e7-499f-bd04-9015b538d341 service nova] Releasing lock "refresh_cache-07c7d125-d689-4499-aa4a-b9d3441c6fd0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.866501] env[68233]: DEBUG nova.compute.utils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.868123] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.868311] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.896750] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1d8d9-455c-9057-9864-c2e9061923a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010095} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.897657] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39324076-7c37-440a-8bb7-0974d0b1c34e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.902697] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 819.902697] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e6bbb-ee18-e0e8-6c94-64e2100f61c5" [ 819.902697] env[68233]: _type = "Task" [ 819.902697] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.910115] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e6bbb-ee18-e0e8-6c94-64e2100f61c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.936375] env[68233]: DEBUG nova.policy [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ce9112ab6ee4f9f87bd665884da6c1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f693e1f45b0d4fc0b871ae4dd2df6c4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.954899] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 819.969707] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0f4e91-9a51-4b46-96e6-925c309142a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.991773] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 820.029370] env[68233]: INFO nova.compute.manager [-] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Took 1.26 seconds to deallocate network for instance. 
[ 820.376434] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.421542] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e6bbb-ee18-e0e8-6c94-64e2100f61c5, 'name': SearchDatastore_Task, 'duration_secs': 0.012055} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.424910] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.424910] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 07c7d125-d689-4499-aa4a-b9d3441c6fd0/07c7d125-d689-4499-aa4a-b9d3441c6fd0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 820.424910] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4a56dc1-b48a-42f7-bfdb-00472c5f109b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.432826] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 820.432826] env[68233]: value = "task-2782328" [ 820.432826] env[68233]: _type = "Task" [ 820.432826] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.444646] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782328, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.474992] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.499430] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 820.501046] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d957b1d-4ea3-4a49-be15-82ba4874b578 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.507137] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 820.507137] env[68233]: value = "task-2782329" [ 820.507137] env[68233]: _type = "Task" [ 820.507137] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.524445] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782329, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.539390] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.726359] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Successfully created port: 1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.945545] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782328, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494548} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.945877] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 07c7d125-d689-4499-aa4a-b9d3441c6fd0/07c7d125-d689-4499-aa4a-b9d3441c6fd0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 820.946077] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 820.946329] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7792b81-9b38-4226-b74b-0be6d6d2ad41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.954082] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 820.954082] env[68233]: value = "task-2782330" [ 820.954082] env[68233]: _type = "Task" [ 820.954082] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.966437] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782330, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.022974] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782329, 'name': PowerOffVM_Task, 'duration_secs': 0.230882} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.022974] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 821.022974] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 821.079523] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a8ebc7-b440-4ee0-b30f-941b4a359bbf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.089659] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1555f4a8-2feb-432a-9f38-bbd5e2480b20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.130333] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd85d343-cecd-4caf-8df3-55f361d6ed7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.138901] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb6452a-3fb1-455a-a053-b9ac9d713b1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.157593] env[68233]: DEBUG nova.compute.provider_tree [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.384032] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.409544] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.409823] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.409988] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.410189] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.410337] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.410484] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.410788] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.410994] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.411244] env[68233]: DEBUG nova.virt.hardware [None 
req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.411423] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.411602] env[68233]: DEBUG nova.virt.hardware [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.412533] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0532299a-c0d7-41c0-bb2c-53cd67376819 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.420794] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1739fd63-71a3-49f7-bdf3-96b2056b2ab8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.466760] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782330, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07304} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.467213] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 821.468047] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b9e1da-f535-4d8a-9f96-04435379e65b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.489856] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Reconfiguring VM instance instance-0000003a to attach disk [datastore2] 07c7d125-d689-4499-aa4a-b9d3441c6fd0/07c7d125-d689-4499-aa4a-b9d3441c6fd0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 821.490178] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0bda263-5267-4cc3-820c-7e526687f546 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.509668] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 821.509668] env[68233]: value = "task-2782331" [ 821.509668] env[68233]: _type = "Task" [ 821.509668] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.519672] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782331, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.538059] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.538431] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.538700] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.539205] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.539426] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.539662] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.539985] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.540281] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.540697] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.541103] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.541372] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.547877] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90110417-529d-473d-9e79-31b952b28e2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.565881] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 821.565881] env[68233]: value = "task-2782332" [ 821.565881] env[68233]: _type = "Task" [ 821.565881] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.579021] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782332, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.662232] env[68233]: DEBUG nova.scheduler.client.report [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 822.021589] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782331, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.079950] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782332, 'name': ReconfigVM_Task, 'duration_secs': 0.206838} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.083022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 822.175233] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.814s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.175233] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 822.175739] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.818s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.176416] env[68233]: DEBUG nova.objects.instance [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lazy-loading 'resources' on Instance uuid ba4ad2f8-fad1-45be-b2b1-68c3a58f3750 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 822.500263] env[68233]: DEBUG nova.compute.manager [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Received event network-vif-plugged-1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 822.500839] env[68233]: DEBUG oslo_concurrency.lockutils [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] Acquiring lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.500839] env[68233]: DEBUG oslo_concurrency.lockutils [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.500963] env[68233]: DEBUG oslo_concurrency.lockutils [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.501168] env[68233]: DEBUG nova.compute.manager [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] No waiting events found dispatching network-vif-plugged-1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 822.501412] env[68233]: WARNING nova.compute.manager [req-11ec16d3-0386-4c75-83e8-60da48df0409 req-bca85b79-a0d1-4d1d-90a7-41ad91410017 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Received unexpected event network-vif-plugged-1ce20932-0faf-4cba-a1ab-409619e3147b for instance with vm_state building and task_state spawning. [ 822.523421] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782331, 'name': ReconfigVM_Task, 'duration_secs': 0.557202} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.523421] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Reconfigured VM instance instance-0000003a to attach disk [datastore2] 07c7d125-d689-4499-aa4a-b9d3441c6fd0/07c7d125-d689-4499-aa4a-b9d3441c6fd0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 822.523421] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6dee719c-45c9-4183-827a-9e84dfe5d27b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.530200] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 822.530200] env[68233]: value = "task-2782333" [ 822.530200] env[68233]: _type = "Task" [ 822.530200] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.540928] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782333, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.587844] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:51:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='508f22af-e037-4878-8980-ab644bbabaa4',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1432814527',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 822.588124] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 822.588367] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 822.588472] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 822.588618] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 822.588798] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 822.588969] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 822.590264] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 822.590264] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] 
Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 822.590264] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 822.590387] env[68233]: DEBUG nova.virt.hardware [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 822.595863] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 822.596312] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-591cc2d6-15ff-4a54-81d5-e80d8b2cde86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.616914] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 822.616914] env[68233]: value = "task-2782334" [ 822.616914] env[68233]: _type = "Task" [ 822.616914] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.620848] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Successfully updated port: 1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 822.629463] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782334, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.679710] env[68233]: DEBUG nova.compute.utils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 822.685073] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 822.685073] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 822.739849] env[68233]: DEBUG nova.policy [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ed176bda3e54b87b7aabdefd3e01f4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c09004b4e0924f84a7362b2bef420a85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 823.043021] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782333, 'name': Rename_Task, 'duration_secs': 0.150086} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.043021] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 823.043412] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fecfd814-3a28-4db1-8d83-57fa8e747dec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.051220] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 823.051220] env[68233]: value = "task-2782335" [ 823.051220] env[68233]: _type = "Task" [ 823.051220] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.061147] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.125590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.125755] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.126120] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 823.133724] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782334, 'name': ReconfigVM_Task, 'duration_secs': 0.240497} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.134602] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 823.136336] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ea588d-8a5e-4043-8cb0-85b6a2e9ab96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.163671] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 823.164660] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Successfully created port: c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.168602] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5b2d81a-e351-453d-8912-a19d8bf51a9a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.184706] env[68233]: DEBUG nova.compute.manager [None 
req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 823.189041] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 823.189041] env[68233]: value = "task-2782336" [ 823.189041] env[68233]: _type = "Task" [ 823.189041] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.201141] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782336, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.273785] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80cb1d1-9504-43b4-a9e1-d41606dc51bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.281391] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafa6878-7cc2-4e13-911d-402c48148055 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.314210] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a66afc0-cb45-4218-bfc7-e32c9e8094de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.322733] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d3fa6d-53cf-40a8-8611-2153fa96435b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.336737] env[68233]: DEBUG nova.compute.provider_tree [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.561535] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782335, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.577597] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Successfully created port: 25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.669898] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 823.705039] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782336, 'name': ReconfigVM_Task, 'duration_secs': 0.270811} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.705039] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 823.705039] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 823.811207] env[68233]: DEBUG nova.network.neutron [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Updating instance_info_cache with network_info: [{"id": "1ce20932-0faf-4cba-a1ab-409619e3147b", "address": "fa:16:3e:5e:d7:b7", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce20932-0f", "ovs_interfaceid": "1ce20932-0faf-4cba-a1ab-409619e3147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.841925] env[68233]: DEBUG nova.scheduler.client.report [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 824.064604] env[68233]: DEBUG oslo_vmware.api [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782335, 'name': PowerOnVM_Task, 'duration_secs': 0.641214} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.064942] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 824.064942] env[68233]: INFO nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Took 8.04 seconds to spawn the instance on the hypervisor. [ 824.065106] env[68233]: DEBUG nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 824.065879] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeed3273-643a-4771-9891-c70b711b3c37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.199034] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 824.211662] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaeed5b0-dae0-45d8-8653-bfa8ce92a789 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.235494] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760cf86b-bafc-4cfe-b960-5e4fdcaa7116 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.240049] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 824.240300] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 824.240540] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 824.240768] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 824.240949] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 824.241131] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 824.241330] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 824.241483] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 824.241645] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 824.241805] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 824.241974] env[68233]: DEBUG nova.virt.hardware [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 824.242720] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae1ede1-1dc5-43d7-b1b0-8d0e274272b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.258811] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 824.267505] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9703dd8e-650e-408b-a189-656944e44df2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.316606] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.316919] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Instance network_info: |[{"id": "1ce20932-0faf-4cba-a1ab-409619e3147b", "address": "fa:16:3e:5e:d7:b7", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce20932-0f", "ovs_interfaceid": "1ce20932-0faf-4cba-a1ab-409619e3147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 824.317336] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:d7:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ce20932-0faf-4cba-a1ab-409619e3147b', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 824.324866] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating folder: Project (f693e1f45b0d4fc0b871ae4dd2df6c4e). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.325142] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6aceefed-4e0a-4d1a-817a-91032b111a9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.335849] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Created folder: Project (f693e1f45b0d4fc0b871ae4dd2df6c4e) in parent group-v559223. [ 824.336037] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating folder: Instances. Parent ref: group-v559384. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 824.336251] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1be2ce67-e376-478d-9761-d56545b6cf7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.345458] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Created folder: Instances in parent group-v559384. [ 824.345676] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.345849] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 824.346527] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.171s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.348455] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-48a0c9be-649b-4ecf-822c-3b2d8dc18b71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.362450] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.414s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.363991] env[68233]: INFO nova.compute.claims [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.367943] env[68233]: INFO nova.scheduler.client.report [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Deleted allocations for instance ba4ad2f8-fad1-45be-b2b1-68c3a58f3750 [ 824.373349] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 824.373349] env[68233]: value = "task-2782339" [ 824.373349] env[68233]: _type = "Task" [ 824.373349] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.381623] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782339, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.525115] env[68233]: DEBUG nova.compute.manager [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Received event network-changed-1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 824.525325] env[68233]: DEBUG nova.compute.manager [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Refreshing instance network info cache due to event network-changed-1ce20932-0faf-4cba-a1ab-409619e3147b. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 824.525544] env[68233]: DEBUG oslo_concurrency.lockutils [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] Acquiring lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.525686] env[68233]: DEBUG oslo_concurrency.lockutils [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] Acquired lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.525843] env[68233]: DEBUG nova.network.neutron [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Refreshing network info cache for port 1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.582053] env[68233]: INFO nova.compute.manager [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Took 43.42 seconds to build instance. [ 824.801099] env[68233]: DEBUG nova.network.neutron [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Port 1c30459d-e88b-42bd-8073-04aa89cecbc3 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 824.881238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-45005d58-0b5e-4128-bc3b-febeb388121d tempest-FloatingIPsAssociationTestJSON-480767673 tempest-FloatingIPsAssociationTestJSON-480767673-project-member] Lock "ba4ad2f8-fad1-45be-b2b1-68c3a58f3750" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.111s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.887791] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782339, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.084579] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bf39228d-32d4-407e-be2e-9cfa62cbfbe0 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.518s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.300837] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Successfully updated port: c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.388611] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782339, 'name': CreateVM_Task, 'duration_secs': 0.645479} completed successfully. 
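task-2782339 (CreateVM_Task) above is driven by the wait_for_task/_poll_task pair in oslo_vmware.api: the SOAP call returns a task reference and the session polls its progress (0%, then 25%, then completed after roughly 0.65 s). Below is a generic polling loop in the same spirit; `get_progress` is a hypothetical stand-in for whatever reads the task state, not an oslo.vmware API.

```python
import time

def wait_for(get_progress, interval=0.5, timeout=300.0):
    """Poll get_progress() until it reports success or error.

    get_progress is a placeholder callable returning (state, percent); the
    real driver delegates this to oslo_vmware's wait_for_task/_poll_task.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, percent = get_progress()
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        print(f"progress is {percent}%")   # mirrors the DEBUG _poll_task lines
        time.sleep(interval)

# Toy usage: a task that finishes on the third poll.
_states = iter([("running", 0), ("running", 25), ("success", 100)])
wait_for(lambda: next(_states), interval=0)
```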
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.391816] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 825.391816] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.391816] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.391816] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 825.391816] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5446dd1a-9eda-454b-97c0-c522fb9ca60b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.395639] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 825.395639] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e16ede-3baf-b4ac-e9bc-2e5821e2af83" [ 825.395639] env[68233]: _type = "Task" [ 825.395639] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.409555] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e16ede-3baf-b4ac-e9bc-2e5821e2af83, 'name': SearchDatastore_Task, 'duration_secs': 0.009917} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.412578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.412780] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 825.413135] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.413321] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.413531] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 825.414009] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f453d8bf-8b94-42f9-a484-33fca99aeb00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.421940] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 825.422102] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Folder [datastore2] devstack-image-cache_base created. 
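Every lock in these traces is reported by oslo_concurrency.lockutils together with how long the caller waited for it and how long it was held (for example "compute_resources" :: waited 30.414s earlier in this burst, and the per-image cache lock acquired and released around the directory creation just above). The following self-contained timing wrapper around a plain threading.Lock reproduces that waited/held accounting; it is an illustration of what those numbers mean, not lockutils itself.

```python
import contextlib
import threading
import time

# name -> lock; a stand-in for lockutils' internal registry of named locks
_locks = {}

@contextlib.contextmanager
def timed_lock(name, owner):
    """Acquire a named lock and report waited/held times, lockutils-style."""
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

with timed_lock("compute_resources", "example.claim"):
    time.sleep(0.01)   # critical section
```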
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 825.423397] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40036497-70cb-46fc-986a-e772549b8c2f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.428093] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 825.428093] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6c0ed-723d-55d5-27d1-0e17b5f68926" [ 825.428093] env[68233]: _type = "Task" [ 825.428093] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.440981] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6c0ed-723d-55d5-27d1-0e17b5f68926, 'name': SearchDatastore_Task, 'duration_secs': 0.007905} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.442199] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a1bb33-a6e0-4855-ba26-241187952bf5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.448957] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 825.448957] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527477ae-1c66-77f3-6273-bc9f70bd3bed" [ 825.448957] env[68233]: _type = "Task" [ 825.448957] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.457890] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527477ae-1c66-77f3-6273-bc9f70bd3bed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.467601] env[68233]: DEBUG nova.network.neutron [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Updated VIF entry in instance network info cache for port 1ce20932-0faf-4cba-a1ab-409619e3147b. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.467950] env[68233]: DEBUG nova.network.neutron [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Updating instance_info_cache with network_info: [{"id": "1ce20932-0faf-4cba-a1ab-409619e3147b", "address": "fa:16:3e:5e:d7:b7", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce20932-0f", "ovs_interfaceid": "1ce20932-0faf-4cba-a1ab-409619e3147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.525322] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.525571] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.525773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.525952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.526184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd 
tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.530025] env[68233]: INFO nova.compute.manager [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Terminating instance [ 825.588611] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.660271] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.660980] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.660980] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.661174] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.661402] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.663733] env[68233]: INFO nova.compute.manager [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 
07c7d125-d689-4499-aa4a-b9d3441c6fd0] Terminating instance [ 825.835389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.835602] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.836461] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.894920] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b985e879-0ee8-4574-9f4f-5c9c3446fd4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.903153] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aa72aa-2f27-4b46-9614-e3bdc68c9585 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.937779] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152f990d-fc58-47e1-887e-8c2b5ce92c13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.945923] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48330f94-bb3e-41df-986f-ba1837f8941b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.968962] env[68233]: DEBUG nova.compute.provider_tree [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.970455] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527477ae-1c66-77f3-6273-bc9f70bd3bed, 'name': SearchDatastore_Task, 'duration_secs': 0.008861} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.973171] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.973171] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/16f20fab-ccf8-4a47-ae7d-9ab55932c5c9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.973171] env[68233]: DEBUG oslo_concurrency.lockutils [req-4d92464d-e8fb-4271-b700-25a78f849bee req-f6ea13a6-2c35-4c64-8744-fe0788b2c056 service nova] Releasing lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.973171] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-415e37d4-5830-4ced-9532-e73af72a5a8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.982387] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 825.982387] env[68233]: value = "task-2782340" [ 825.982387] env[68233]: _type = "Task" [ 825.982387] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.993848] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782340, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.037851] env[68233]: DEBUG nova.compute.manager [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.037851] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.038810] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ceba9d5-cdcb-4cc0-889e-fef8fbacf3ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.049129] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.049510] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-569601e6-db07-4b40-81f9-f1da7ee55058 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.057688] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 826.057688] env[68233]: value = "task-2782341" [ 826.057688] env[68233]: _type = "Task" [ 826.057688] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.070215] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782341, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.119553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.172166] env[68233]: DEBUG nova.compute.manager [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 826.172442] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 826.173522] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac93cd23-ab99-4858-b0be-6ccc2b0f1d1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.182933] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 826.183302] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cde423ab-835a-4f9f-b43e-500cad541f41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.191138] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 826.191138] env[68233]: value = "task-2782342" [ 826.191138] env[68233]: _type = "Task" [ 826.191138] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.201129] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782342, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.472956] env[68233]: DEBUG nova.scheduler.client.report [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 826.494520] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782340, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.49462} completed successfully. 
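The report-client entry above restates the provider inventory: 48 VCPU at a 4.0 allocation ratio (max 16 per instance), 196590 MB of RAM with 512 MB reserved, and 400 GB of disk with max_unit 174. Conceptually a claim fits when each requested amount stays within max_unit and cumulative usage stays within (total - reserved) * allocation_ratio per resource class. Here is a small check along those lines using the logged figures; it illustrates the rule, not the resource tracker's code.

```python
# Figures copied from the provider inventory in the log above.
INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,    "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 174,   "allocation_ratio": 1.0},
}

def claim_fits(inventory, used, requested):
    """A request fits if each amount is <= max_unit and total usage stays
    within (total - reserved) * allocation_ratio for that resource class."""
    for rc, amount in requested.items():
        inv = inventory[rc]
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        if amount > inv["max_unit"] or used.get(rc, 0) + amount > capacity:
            return False
    return True

# An m1.nano-sized request (1 vCPU, 192 MB RAM, 1 GB root disk); the "used"
# baseline is made up for the example.
print(claim_fits(INVENTORY,
                 used={"VCPU": 30, "MEMORY_MB": 150000, "DISK_GB": 120},
                 requested={"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}))  # True
```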
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.494879] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/16f20fab-ccf8-4a47-ae7d-9ab55932c5c9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.495141] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.495460] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-00ed8a84-4c30-4b31-94ec-5d59255ae820 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.504018] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 826.504018] env[68233]: value = "task-2782343" [ 826.504018] env[68233]: _type = "Task" [ 826.504018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.513398] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782343, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.571920] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782341, 'name': PowerOffVM_Task, 'duration_secs': 0.377274} completed successfully. 
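The spawn path for 16f20fab follows the usual VMDK image-cache sequence visible above: take the cache lock, search the datastore for the cached image, create the cache directory if missing, copy the cached disk into the instance folder (CopyVirtualDisk_Task, task-2782340), then extend the root disk to the flavor size (ExtendVirtualDisk_Task, task-2782343). Below is a compressed sketch of that ordering with injected stand-in operations; none of the callables are real Nova or oslo APIs.

```python
import contextlib

def provision_root_disk(cache_path, instance_path, root_size_mb,
                        exists, fetch, copy, extend, lock):
    """Sketch of the fetch-if-missing + copy + extend ordering.

    All parameters except the paths and size are placeholder callables; the
    real driver uses datastore search, CopyVirtualDisk_Task and
    ExtendVirtualDisk_Task for these steps.
    """
    with lock(cache_path):                 # serialize work per cached image
        if not exists(cache_path):         # SearchDatastore_Task equivalent
            fetch(cache_path)              # download the image into the cache
    copy(cache_path, instance_path)        # CopyVirtualDisk_Task equivalent
    extend(instance_path, root_size_mb)    # ExtendVirtualDisk_Task equivalent

# Toy usage with in-memory stand-ins: path -> size in MB.
store = {"cache/img.vmdk": 20}
provision_root_disk(
    "cache/img.vmdk", "inst/inst.vmdk", 1024,
    exists=lambda p: p in store,
    fetch=lambda p: store.setdefault(p, 20),
    copy=lambda src, dst: store.__setitem__(dst, store[src]),
    extend=lambda p, mb: store.__setitem__(p, max(store[p], mb)),
    lock=lambda name: contextlib.nullcontext(),
)
print(store)   # {'cache/img.vmdk': 20, 'inst/inst.vmdk': 1024}
```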
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.571920] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 826.571920] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 826.571920] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13a7590d-ea18-414b-85df-0ce7b40b4bd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.639112] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 826.639350] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 826.639540] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleting the datastore file [datastore2] 64b8997c-3246-4c97-a6c9-3a6a23645d38 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 826.639805] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3ce8496-75d2-41d3-ad41-1fed0b1ce748 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.647050] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 826.647050] env[68233]: value = "task-2782345" [ 826.647050] env[68233]: _type = "Task" [ 826.647050] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.657057] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782345, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.702162] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782342, 'name': PowerOffVM_Task, 'duration_secs': 0.310394} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.702426] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 826.703164] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 826.703164] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11757678-34d9-4356-a59b-fc6ab9d2e267 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.779727] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 826.780062] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 826.780261] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleting the datastore file [datastore2] 07c7d125-d689-4499-aa4a-b9d3441c6fd0 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 826.780552] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14a334c0-f6ff-4218-9f1c-ec68560f1dd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.790943] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 826.790943] env[68233]: value = "task-2782347" [ 826.790943] env[68233]: _type = "Task" [ 826.790943] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.803180] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782347, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.897311] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.897507] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.897693] env[68233]: DEBUG nova.network.neutron [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.953349] env[68233]: DEBUG nova.compute.manager [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-vif-plugged-c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 826.953349] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Acquiring lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.953349] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.953553] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.953937] env[68233]: DEBUG nova.compute.manager [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] No waiting events found dispatching network-vif-plugged-c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 826.953937] 
env[68233]: WARNING nova.compute.manager [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received unexpected event network-vif-plugged-c35d0002-3d03-4b47-bc5d-c4f180e895e8 for instance with vm_state building and task_state spawning. [ 826.954154] env[68233]: DEBUG nova.compute.manager [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-changed-c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 826.954397] env[68233]: DEBUG nova.compute.manager [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Refreshing instance network info cache due to event network-changed-c35d0002-3d03-4b47-bc5d-c4f180e895e8. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 826.954397] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Acquiring lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.954540] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Acquired lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.954685] env[68233]: DEBUG nova.network.neutron [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Refreshing network info cache for port c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 826.978474] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.616s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.978999] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Start building networks asynchronously for instance. 
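The req-d5b30bd8 entries show the external-event plumbing: Neutron reports network-vif-plugged for port c35d0002, the compute manager looks for a waiter registered on the instance, finds none ("No waiting events found dispatching ..."), and emits the WARNING about an unexpected event while the instance is still building. The toy registry below captures that pop-or-warn behaviour; it is a hypothetical class, not nova.compute.manager.InstanceEvents.

```python
import warnings

class EventRegistry:
    """Map (instance_uuid, event_name) -> waiter; pop the waiter on dispatch."""

    def __init__(self):
        self._waiters = {}

    def prepare(self, instance_uuid, event_name, callback):
        """Register interest before triggering the external operation."""
        self._waiters[(instance_uuid, event_name)] = callback

    def dispatch(self, instance_uuid, event_name):
        """Deliver an external event; warn if nobody was waiting for it."""
        callback = self._waiters.pop((instance_uuid, event_name), None)
        if callback is None:
            warnings.warn(f"Received unexpected event {event_name} "
                          f"for instance {instance_uuid}")
            return
        callback()

registry = EventRegistry()
# Nothing registered for this port event, so dispatch only warns, mirroring
# the WARNING in the log above.
registry.dispatch("da2a5acb-0861-4225-a6b4-324482c480ea",
                  "network-vif-plugged-c35d0002-3d03-4b47-bc5d-c4f180e895e8")
```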
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.981935] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.439s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.982167] env[68233]: DEBUG nova.objects.instance [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lazy-loading 'resources' on Instance uuid 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 827.014889] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069536} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.015262] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 827.016780] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b6c9f7-e564-4b5e-b302-07b1456f6cd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.041769] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/16f20fab-ccf8-4a47-ae7d-9ab55932c5c9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 827.043485] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0939b04-895b-4d2e-860e-2193f0f05a80 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.068445] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 827.068445] env[68233]: value = "task-2782348" [ 827.068445] env[68233]: _type = "Task" [ 827.068445] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.080074] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782348, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.157447] env[68233]: DEBUG oslo_vmware.api [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782345, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133208} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.157712] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.157888] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 827.158076] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.158262] env[68233]: INFO nova.compute.manager [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Took 1.12 seconds to destroy the instance on the hypervisor. [ 827.158501] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.158684] env[68233]: DEBUG nova.compute.manager [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 827.158777] env[68233]: DEBUG nova.network.neutron [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.306086] env[68233]: DEBUG oslo_vmware.api [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782347, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13972} completed successfully. 
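Instance 64b8997c is torn down in the order the vmops traces show: power off, unregister the VM, delete its datastore directory, then deallocate the Neutron ports, with the hypervisor-side destroy taking about 1.1 s. The ordering matters because the files cannot be removed while the VM is still registered, so here it is as a plain sequence of placeholder steps; the callables are stand-ins, not driver methods.

```python
def destroy_instance(uuid, power_off, unregister, delete_files, deallocate_net):
    """Tear down in the same order as the vmops destroy path in the log."""
    power_off(uuid)        # PowerOffVM_Task
    unregister(uuid)       # UnregisterVM
    delete_files(uuid)     # DeleteDatastoreFile_Task on the instance folder
    deallocate_net(uuid)   # release the instance's Neutron ports

destroy_instance(
    "64b8997c-3246-4c97-a6c9-3a6a23645d38",
    power_off=lambda u: print(f"powered off {u}"),
    unregister=lambda u: print(f"unregistered {u}"),
    delete_files=lambda u: print(f"deleted [datastore2] {u}"),
    deallocate_net=lambda u: print(f"deallocated network for {u}"),
)
```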
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.306366] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 827.306552] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 827.306733] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.306914] env[68233]: INFO nova.compute.manager [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 827.307387] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.307610] env[68233]: DEBUG nova.compute.manager [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 827.307707] env[68233]: DEBUG nova.network.neutron [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 827.411499] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Successfully updated port: 25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.487758] env[68233]: DEBUG nova.compute.utils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 827.489273] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 827.489447] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.505039] env[68233]: DEBUG nova.network.neutron [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.528098] env[68233]: DEBUG nova.compute.manager [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-vif-plugged-25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 827.528098] env[68233]: DEBUG oslo_concurrency.lockutils [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] Acquiring lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.528098] env[68233]: DEBUG oslo_concurrency.lockutils [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.528098] env[68233]: DEBUG oslo_concurrency.lockutils [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.528098] env[68233]: DEBUG nova.compute.manager [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] No waiting events found dispatching network-vif-plugged-25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 827.528098] env[68233]: WARNING nova.compute.manager [req-57b778b4-db09-4f1e-a232-c12a521d9237 req-fafa77ef-04da-4d3b-99a1-a1f6720aa8dc service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received unexpected event network-vif-plugged-25a5eb6c-78b9-4400-86e5-a52be08b80e4 for instance with vm_state building and task_state spawning. [ 827.584477] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782348, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.625123] env[68233]: DEBUG nova.network.neutron [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.636642] env[68233]: DEBUG nova.policy [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.915550] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.993552] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 828.029119] env[68233]: DEBUG nova.network.neutron [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.081219] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782348, 'name': ReconfigVM_Task, 'duration_secs': 0.821692} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.084250] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/16f20fab-ccf8-4a47-ae7d-9ab55932c5c9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.085058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f15c122e-e6f3-427e-ae1d-9cb279ced864 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.094190] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 828.094190] env[68233]: value = "task-2782349" [ 828.094190] env[68233]: _type = "Task" [ 828.094190] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.106923] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782349, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.114984] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499a382e-02f8-4d10-9232-1a404543128e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.123182] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f159b2c0-5954-4125-82e9-3b55b259366f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.128509] env[68233]: DEBUG oslo_concurrency.lockutils [req-d5b30bd8-f050-4c0c-b0bf-d50db885b7e8 req-26fb08f2-6517-4d12-bb56-740a2d48430c service nova] Releasing lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.128929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.129254] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.159344] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97fcaaba-fe47-4e34-98f9-db043e1f4ac6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.168469] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8475526a-b978-4c43-92c8-7be8708a2cc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.185387] env[68233]: DEBUG nova.compute.provider_tree [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.194534] env[68233]: DEBUG nova.network.neutron [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.357857] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Successfully created port: 6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 828.382490] env[68233]: DEBUG nova.network.neutron [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.533897] 
env[68233]: DEBUG oslo_concurrency.lockutils [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.606058] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782349, 'name': Rename_Task, 'duration_secs': 0.148604} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.606058] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.606058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97e91057-3403-4844-a348-d7bf23ce0dbe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.613590] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 828.613590] env[68233]: value = "task-2782350" [ 828.613590] env[68233]: _type = "Task" [ 828.613590] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.624010] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782350, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.687974] env[68233]: DEBUG nova.scheduler.client.report [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.696626] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 828.698618] env[68233]: INFO nova.compute.manager [-] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Took 1.39 seconds to deallocate network for instance. 
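The Rename_Task and PowerOnVM_Task entries above are produced by oslo.vmware's session helper, which submits a vSphere task and then polls it until vCenter reports completion. A minimal sketch of that pattern follows; the vCenter endpoint, credentials and vm_ref are hypothetical placeholders, not values taken from this log.

from oslo_vmware import api

# Create an authenticated vSphere session (placeholder endpoint/credentials).
session = api.VMwareAPISession(
    'vc.example.test',   # vCenter host (placeholder)
    'svc-user',          # username (placeholder)
    'secret',            # password (placeholder)
    10,                  # API retry count
    0.5)                 # task poll interval, seconds

vm_ref = ...  # managed-object reference of the VM, obtained elsewhere (placeholder)

# Submit the power-on task and block until it finishes; this polling is what
# emits the "progress is N%" / "completed successfully" lines seen in the log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)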
[ 828.888488] env[68233]: INFO nova.compute.manager [-] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Took 1.73 seconds to deallocate network for instance. [ 828.985289] env[68233]: DEBUG nova.compute.manager [req-0bbd6f79-e020-4ec5-a34b-9ec136a998b0 req-764d7992-5477-454d-a9a9-26410430e602 service nova] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Received event network-vif-deleted-6d3afe70-2479-4ac4-a335-e5a1f04778a7 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 829.004376] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 829.017656] env[68233]: DEBUG nova.network.neutron [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Updating instance_info_cache with network_info: [{"id": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "address": "fa:16:3e:90:a4:b1", "network": {"id": "314a2526-9b26-4941-b8d1-60126c786dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1823335609", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35d0002-3d", "ovs_interfaceid": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "address": "fa:16:3e:0d:be:5f", "network": {"id": "1198a55e-eb8f-4626-8700-0428a6119d0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-212493659", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a5eb6c-78", "ovs_interfaceid": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 829.033866] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 829.034131] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.034295] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 829.034480] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.034623] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 829.034793] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 829.034982] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 829.035164] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 829.035339] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 829.035502] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 829.035671] env[68233]: DEBUG nova.virt.hardware [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 829.036831] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9b2b25b-6db5-46e1-a3b4-2646a1372253 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.049839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0b1994-66fa-42e6-9fb1-40bf0c30faa9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.066450] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb7a0f3-22e4-470d-94ff-e200930d7bff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.087808] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8433f846-7fdb-4ce5-b1f6-c4f494cc6a1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.096028] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 829.124902] env[68233]: DEBUG oslo_vmware.api [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782350, 'name': PowerOnVM_Task, 'duration_secs': 0.457477} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.125202] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 829.125411] env[68233]: INFO nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Took 7.74 seconds to spawn the instance on the hypervisor. 
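The 'Acquiring lock "compute_resources"' / 'acquired ... waited Ns' / '"released" ... held Ns' triplets seen throughout this log are emitted by oslo.concurrency's synchronized wrapper (the inner() frames referenced as lockutils.py:405/410/424). A minimal sketch of that usage, with a placeholder function body:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section; in Nova this is the resource tracker's bookkeeping.
    pass

update_usage()  # acquire/wait/hold durations are logged at DEBUG level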
[ 829.125930] env[68233]: DEBUG nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 829.126982] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28baa676-d8c4-401c-a8e9-9f38bd509eab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.193413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.211s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.195715] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.263s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.197203] env[68233]: INFO nova.compute.claims [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.206441] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.211375] env[68233]: INFO nova.scheduler.client.report [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Deleted allocations for instance 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6 [ 829.394141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.522019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.522019] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance network_info: 
|[{"id": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "address": "fa:16:3e:90:a4:b1", "network": {"id": "314a2526-9b26-4941-b8d1-60126c786dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1823335609", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35d0002-3d", "ovs_interfaceid": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "address": "fa:16:3e:0d:be:5f", "network": {"id": "1198a55e-eb8f-4626-8700-0428a6119d0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-212493659", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a5eb6c-78", "ovs_interfaceid": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.522019] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:a4:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7654928b-7afe-42e3-a18d-68ecc775cefe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c35d0002-3d03-4b47-bc5d-c4f180e895e8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:be:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cfbd1264-be3d-4ca9-953a-df79de7b010b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25a5eb6c-78b9-4400-86e5-a52be08b80e4', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.531122] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 
tempest-ServersTestMultiNic-1127264459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.531350] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.531573] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d429738e-db26-4403-80be-5524b447c9e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.562435] env[68233]: DEBUG nova.compute.manager [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-changed-25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 829.562435] env[68233]: DEBUG nova.compute.manager [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Refreshing instance network info cache due to event network-changed-25a5eb6c-78b9-4400-86e5-a52be08b80e4. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 829.562435] env[68233]: DEBUG oslo_concurrency.lockutils [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] Acquiring lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.562589] env[68233]: DEBUG oslo_concurrency.lockutils [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] Acquired lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.562747] env[68233]: DEBUG nova.network.neutron [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Refreshing network info cache for port 25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.573593] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.573593] env[68233]: value = "task-2782351" [ 829.573593] env[68233]: _type = "Task" [ 829.573593] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.582906] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782351, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.602604] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.602892] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67cb7f06-ec6a-4e74-9c9c-31390ae425c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.611787] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 829.611787] env[68233]: value = "task-2782352" [ 829.611787] env[68233]: _type = "Task" [ 829.611787] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.625189] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782352, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.645460] env[68233]: INFO nova.compute.manager [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Took 45.89 seconds to build instance. [ 829.718631] env[68233]: DEBUG oslo_concurrency.lockutils [None req-73ddd603-6269-463a-8137-fd235e748374 tempest-VolumesAssistedSnapshotsTest-1273672019 tempest-VolumesAssistedSnapshotsTest-1273672019-project-member] Lock "9eeb90c6-6ac2-43cb-887a-b69a28dc43a6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.820s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.019209] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Successfully updated port: 6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 830.084069] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782351, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.125019] env[68233]: DEBUG oslo_vmware.api [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782352, 'name': PowerOnVM_Task, 'duration_secs': 0.425538} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.125359] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.125592] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bba9dcc5-64e4-4447-bb8c-3efbb6313eff tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance '72467d49-6fa8-42db-871e-4e50e77eedf7' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 830.148047] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54d24d9d-0fe5-4c74-b75e-9c0816d97268 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.984s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.365025] env[68233]: DEBUG nova.network.neutron [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Updated VIF entry in instance network info cache for port 25a5eb6c-78b9-4400-86e5-a52be08b80e4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.365498] env[68233]: DEBUG nova.network.neutron [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Updating instance_info_cache with network_info: [{"id": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "address": "fa:16:3e:90:a4:b1", "network": {"id": "314a2526-9b26-4941-b8d1-60126c786dc0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1823335609", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.179", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7654928b-7afe-42e3-a18d-68ecc775cefe", "external-id": "cl2-zone-807", "segmentation_id": 807, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc35d0002-3d", "ovs_interfaceid": "c35d0002-3d03-4b47-bc5d-c4f180e895e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "address": "fa:16:3e:0d:be:5f", "network": {"id": "1198a55e-eb8f-4626-8700-0428a6119d0a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-212493659", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.165", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "c09004b4e0924f84a7362b2bef420a85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cfbd1264-be3d-4ca9-953a-df79de7b010b", "external-id": "nsx-vlan-transportzone-543", "segmentation_id": 543, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25a5eb6c-78", "ovs_interfaceid": "25a5eb6c-78b9-4400-86e5-a52be08b80e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.520450] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.520588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.520734] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.584827] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782351, 'name': CreateVM_Task, 'duration_secs': 0.577113} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.585014] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.585793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.585952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.586331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.586557] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93d0f629-f87e-4fb2-95a1-6cc3162fa236 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.592539] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 830.592539] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7e6c9-e67c-f6e9-da2b-cd9a7f80399c" [ 830.592539] env[68233]: _type = "Task" [ 830.592539] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.606072] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7e6c9-e67c-f6e9-da2b-cd9a7f80399c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.651386] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.871317] env[68233]: DEBUG oslo_concurrency.lockutils [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] Releasing lock "refresh_cache-da2a5acb-0861-4225-a6b4-324482c480ea" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 830.871317] env[68233]: DEBUG nova.compute.manager [req-56ba548b-9e38-4cc0-b743-b3db1470a888 req-4607057e-28b6-438d-a5da-a3f241ba6941 service nova] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Received event network-vif-deleted-4971febd-c50e-4e4b-88f7-18e159b3e7cb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 830.895151] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16114a1f-658a-4571-b12d-585f6a9c9538 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.902109] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff522d41-3fee-4e68-aa86-18cd19b0d67e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.935474] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209d4813-1b1a-40d3-bc1a-00cef4377192 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.945319] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde0c9f7-6319-408e-a093-0194479cc69f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.962214] env[68233]: DEBUG nova.compute.provider_tree [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.967035] env[68233]: INFO nova.compute.manager [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Rescuing [ 830.967035] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.967035] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.967035] env[68233]: DEBUG nova.network.neutron [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Building network info cache for instance {{(pid=68233) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.013937] env[68233]: DEBUG nova.compute.manager [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Received event network-vif-plugged-6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 831.014172] env[68233]: DEBUG oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Acquiring lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.014378] env[68233]: DEBUG oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.014546] env[68233]: DEBUG oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.014713] env[68233]: DEBUG nova.compute.manager [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] No waiting events found dispatching network-vif-plugged-6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 831.014875] env[68233]: WARNING nova.compute.manager [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Received unexpected event network-vif-plugged-6aca81ac-c770-4892-bc3b-9d9096c8656e for instance with vm_state building and task_state spawning. [ 831.015449] env[68233]: DEBUG nova.compute.manager [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Received event network-changed-6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 831.015733] env[68233]: DEBUG nova.compute.manager [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Refreshing instance network info cache due to event network-changed-6aca81ac-c770-4892-bc3b-9d9096c8656e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 831.015972] env[68233]: DEBUG oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Acquiring lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.053239] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 831.109375] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c7e6c9-e67c-f6e9-da2b-cd9a7f80399c, 'name': SearchDatastore_Task, 'duration_secs': 0.012918} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.109682] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.109936] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.110215] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.110373] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.110592] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.110951] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ee35d99-de0f-41c5-bfd1-0224d5b9c91f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.122134] env[68233]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.122134] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.122134] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb29c64-458d-411f-a6cc-15ecae1b221e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.128180] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 831.128180] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad7c18-4509-df23-1118-052294b25e84" [ 831.128180] env[68233]: _type = "Task" [ 831.128180] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.144072] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad7c18-4509-df23-1118-052294b25e84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.178214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.249727] env[68233]: DEBUG nova.network.neutron [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Updating instance_info_cache with network_info: [{"id": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "address": "fa:16:3e:75:38:3e", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aca81ac-c7", "ovs_interfaceid": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.465036] env[68233]: DEBUG nova.scheduler.client.report [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 831.640656] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad7c18-4509-df23-1118-052294b25e84, 'name': SearchDatastore_Task, 'duration_secs': 0.009575} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.643720] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adc65c6c-813f-48d5-b1cd-52a7c0bb9279 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.659615] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 831.659615] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c00c50-f00a-4550-5c36-acb27e795713" [ 831.659615] env[68233]: _type = "Task" [ 831.659615] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.672035] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c00c50-f00a-4550-5c36-acb27e795713, 'name': SearchDatastore_Task, 'duration_secs': 0.010779} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.672481] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.672862] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] da2a5acb-0861-4225-a6b4-324482c480ea/da2a5acb-0861-4225-a6b4-324482c480ea.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 831.676868] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0be62417-1e71-4a52-8c75-47b589b734d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.684119] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 831.684119] env[68233]: value = "task-2782353" [ 831.684119] env[68233]: _type = "Task" [ 831.684119] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.695022] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782353, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.746464] env[68233]: DEBUG nova.network.neutron [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Updating instance_info_cache with network_info: [{"id": "1ce20932-0faf-4cba-a1ab-409619e3147b", "address": "fa:16:3e:5e:d7:b7", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ce20932-0f", "ovs_interfaceid": "1ce20932-0faf-4cba-a1ab-409619e3147b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.753026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.753703] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Instance network_info: |[{"id": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "address": "fa:16:3e:75:38:3e", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aca81ac-c7", "ovs_interfaceid": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.756537] env[68233]: DEBUG 
oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Acquired lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.756776] env[68233]: DEBUG nova.network.neutron [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Refreshing network info cache for port 6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.758291] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:38:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6aca81ac-c770-4892-bc3b-9d9096c8656e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.770495] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.772236] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.772236] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da1f2347-db51-447e-9163-82ae93bc8860 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.800093] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.800093] env[68233]: value = "task-2782354" [ 831.800093] env[68233]: _type = "Task" [ 831.800093] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.816734] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782354, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.972978] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.777s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.973721] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.976832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.497s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.977132] env[68233]: DEBUG nova.objects.instance [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lazy-loading 'resources' on Instance uuid 135c2d22-26ac-41a4-a860-accc12dd4c9a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 832.198014] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782353, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488131} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.198957] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] da2a5acb-0861-4225-a6b4-324482c480ea/da2a5acb-0861-4225-a6b4-324482c480ea.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 832.199424] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 832.200260] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5aa8bbb-6d2a-4e98-a951-a4594b082f62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.209025] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 832.209025] env[68233]: value = "task-2782355" [ 832.209025] env[68233]: _type = "Task" [ 832.209025] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.220569] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782355, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.247483] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "refresh_cache-16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.316013] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782354, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.486689] env[68233]: DEBUG nova.compute.utils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 832.492913] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 832.493184] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 832.615034] env[68233]: DEBUG nova.policy [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fad204b45934ee18bdf27783c32f4a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a595904e02240fa8c04e471a0affef3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 832.689317] env[68233]: DEBUG nova.network.neutron [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Updated VIF entry in instance network info cache for port 6aca81ac-c770-4892-bc3b-9d9096c8656e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.689669] env[68233]: DEBUG nova.network.neutron [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Updating instance_info_cache with network_info: [{"id": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "address": "fa:16:3e:75:38:3e", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aca81ac-c7", "ovs_interfaceid": "6aca81ac-c770-4892-bc3b-9d9096c8656e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.722422] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064559} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.725030] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 832.726027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4032eda-6b79-46a5-ae10-17a0dc30091f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.753395] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] da2a5acb-0861-4225-a6b4-324482c480ea/da2a5acb-0861-4225-a6b4-324482c480ea.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 832.756427] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-962105ae-6af2-4eb7-91ef-dc2c171c97c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.780335] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 832.780335] env[68233]: value = "task-2782356" [ 832.780335] env[68233]: _type = "Task" [ 832.780335] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.794035] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782356, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.817303] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782354, 'name': CreateVM_Task, 'duration_secs': 0.710396} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.817478] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 832.818211] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.818374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.818740] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 832.819073] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64113b92-1b81-4d41-9429-6e75e69a0a28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.824976] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 832.824976] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f3d058-8075-8089-c955-d6d8e1d1adeb" [ 832.824976] env[68233]: _type = "Task" [ 832.824976] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.835138] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f3d058-8075-8089-c955-d6d8e1d1adeb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.993938] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 833.147546] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d64d5b7-26d1-4905-ac92-2d5ad85b3da3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.154458] env[68233]: DEBUG nova.network.neutron [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Port 1c30459d-e88b-42bd-8073-04aa89cecbc3 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 833.154458] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.154458] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.154458] env[68233]: DEBUG nova.network.neutron [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 833.164808] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d3d190-fffa-46d4-af81-b28e31a3ad20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.201399] env[68233]: DEBUG oslo_concurrency.lockutils [req-159a1bad-afdc-47a4-93ba-01f23eeb3fa8 req-d6508418-0330-4bb1-9a6d-5b1ce98fa4eb service nova] Releasing lock "refresh_cache-3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.202822] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e91d8fc-a4f1-410d-956a-f5865e057495 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.212328] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4f9c4e-7223-46f2-ba56-547a637f9d38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.232249] env[68233]: DEBUG nova.compute.provider_tree [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.293079] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 
tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782356, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.337054] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f3d058-8075-8089-c955-d6d8e1d1adeb, 'name': SearchDatastore_Task, 'duration_secs': 0.04066} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.337500] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.337816] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.338114] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.338275] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.338462] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.339068] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73249ca4-18f6-45fc-9ec7-80dc06ad634c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.354821] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.355040] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.355825] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fff3624e-056f-43e8-bb99-9ba6e6c59610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.363185] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 833.363185] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1a3ee-7101-5cb3-d9d1-3e4f2591c67a" [ 833.363185] env[68233]: _type = "Task" [ 833.363185] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.372084] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1a3ee-7101-5cb3-d9d1-3e4f2591c67a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.735509] env[68233]: DEBUG nova.scheduler.client.report [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.798225] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.798686] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782356, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.799081] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bd8c90f-684b-43a7-afb0-8f0ae774e645 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.802140] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Successfully created port: 1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.806316] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 833.806316] env[68233]: value = "task-2782357" [ 833.806316] env[68233]: _type = "Task" [ 833.806316] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.815668] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.877337] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1a3ee-7101-5cb3-d9d1-3e4f2591c67a, 'name': SearchDatastore_Task, 'duration_secs': 0.048788} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.878440] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9757e6d5-5935-4d17-bc5c-cfab2a10b007 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.886381] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 833.886381] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e50b63-7c12-f463-22ed-559a52695bac" [ 833.886381] env[68233]: _type = "Task" [ 833.886381] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.897635] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e50b63-7c12-f463-22ed-559a52695bac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.005019] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 834.031122] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 834.031122] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.031122] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 834.031122] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.031122] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 834.031606] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 834.031955] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 834.032336] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 834.032619] env[68233]: DEBUG 
nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 834.032947] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 834.033285] env[68233]: DEBUG nova.virt.hardware [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 834.034289] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878c856f-ae5f-4967-8654-a47eba5b1873 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.045367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c29d61-0718-4d42-9e4c-76b8a77d5982 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.244105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.265s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.246127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.018s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.249479] env[68233]: INFO nova.compute.claims [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.272879] env[68233]: INFO nova.scheduler.client.report [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleted allocations for instance 135c2d22-26ac-41a4-a860-accc12dd4c9a [ 834.297473] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782356, 'name': ReconfigVM_Task, 'duration_secs': 1.110714} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.301086] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Reconfigured VM instance instance-0000003c to attach disk [datastore2] da2a5acb-0861-4225-a6b4-324482c480ea/da2a5acb-0861-4225-a6b4-324482c480ea.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.301086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5914758a-f774-4aea-9061-c86acc9f6fb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.307030] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 834.307030] env[68233]: value = "task-2782358" [ 834.307030] env[68233]: _type = "Task" [ 834.307030] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.311457] env[68233]: DEBUG nova.network.neutron [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.322872] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782357, 'name': PowerOffVM_Task, 'duration_secs': 0.213095} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.326254] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 834.326909] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782358, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.327729] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a9b181-c6c1-4810-87d3-9107252dcb5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.348980] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e236f2d6-c07f-4be8-98f2-e5dd0e957797 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.384213] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.384213] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c02f4fd5-2fce-4c06-9c25-f06cb31182aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.394120] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 834.394120] env[68233]: value = "task-2782359" [ 834.394120] env[68233]: _type = "Task" [ 834.394120] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.402810] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e50b63-7c12-f463-22ed-559a52695bac, 'name': SearchDatastore_Task, 'duration_secs': 0.020963} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.403680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.404075] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1/3d94d1b6-ba04-407d-9398-d4f7b21a7ee1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.404507] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-580466d1-0d74-4dba-86a0-6148ba93f4ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.411365] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 834.411684] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 834.412126] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.412530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.412830] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 834.413197] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74c2a748-0c90-4839-9ed5-3b192aed2318 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 834.417038] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 834.417038] env[68233]: value = "task-2782360" [ 834.417038] env[68233]: _type = "Task" [ 834.417038] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.421819] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 834.422218] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 834.426323] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73cb65a7-ff6f-427d-90f3-5d6fcd2d0235 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.428988] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.435026] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 834.435026] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c482ca-24ec-00dd-e89d-6a2854924db1" [ 834.435026] env[68233]: _type = "Task" [ 834.435026] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.443801] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c482ca-24ec-00dd-e89d-6a2854924db1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.781283] env[68233]: DEBUG oslo_concurrency.lockutils [None req-877ffea0-ff7c-4b5a-adf0-a871dd42638b tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "135c2d22-26ac-41a4-a860-accc12dd4c9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.930s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.817053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.823757] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782358, 'name': Rename_Task, 'duration_secs': 0.160007} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.824155] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 834.824533] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-acab6560-1b85-457c-9555-3cacac90f542 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.834839] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 834.834839] env[68233]: value = "task-2782361" [ 834.834839] env[68233]: _type = "Task" [ 834.834839] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.849574] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782361, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.930190] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782360, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.948193] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c482ca-24ec-00dd-e89d-6a2854924db1, 'name': SearchDatastore_Task, 'duration_secs': 0.011571} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.948193] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b13bc3d0-59ac-410f-be76-a54850fdb7de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.952823] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 834.952823] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254c2d4-042b-4ed1-8c94-0f20beb6a7a7" [ 834.952823] env[68233]: _type = "Task" [ 834.952823] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.961545] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254c2d4-042b-4ed1-8c94-0f20beb6a7a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.319994] env[68233]: DEBUG nova.compute.manager [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68233) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 835.321034] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.348026] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782361, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.429057] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516387} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.429481] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1/3d94d1b6-ba04-407d-9398-d4f7b21a7ee1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.429945] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.432316] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1aeacd56-2a94-4785-b39b-50a379365a28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.438667] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 835.438667] env[68233]: value = "task-2782362" [ 835.438667] env[68233]: _type = "Task" [ 835.438667] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.451280] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782362, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.465752] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5254c2d4-042b-4ed1-8c94-0f20beb6a7a7, 'name': SearchDatastore_Task, 'duration_secs': 0.013196} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.466094] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.466439] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. 
{{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 835.466747] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7b329d0-d686-4429-9d15-19494c002ba7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.475040] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 835.475040] env[68233]: value = "task-2782363" [ 835.475040] env[68233]: _type = "Task" [ 835.475040] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.483757] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.759479] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0129de9-1d97-42a7-a68a-61020f9c4cc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.768895] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b4cefe-ea20-4a42-88e6-8f43fe0bf5e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.804679] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7841685-f108-401d-a1ac-f15c2aabea50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.816717] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f9aa38-b3e4-4b25-8802-59b6fc0e3e31 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.832362] env[68233]: DEBUG nova.compute.provider_tree [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.847039] env[68233]: DEBUG oslo_vmware.api [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782361, 'name': PowerOnVM_Task, 'duration_secs': 0.563098} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.847304] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 835.847501] env[68233]: INFO nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Took 11.65 seconds to spawn the instance on the hypervisor. [ 835.847681] env[68233]: DEBUG nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 835.848869] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4579c79c-c811-4004-8423-404fbeead857 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.949774] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782362, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074126} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.949774] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.950058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c4a0fb-83b9-46e8-a8f4-922060af719a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.982546] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Reconfiguring VM instance instance-0000003d to attach disk [datastore2] 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1/3d94d1b6-ba04-407d-9398-d4f7b21a7ee1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.982929] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aeb95aad-bf9c-4ee5-b8f3-9009761c8d69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.005915] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 836.005915] env[68233]: value = "task-2782364" [ 836.005915] env[68233]: _type = "Task" [ 836.005915] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.012392] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.017643] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782364, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.265229] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Successfully updated port: 1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 836.338145] env[68233]: DEBUG nova.scheduler.client.report [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 836.368608] env[68233]: INFO nova.compute.manager [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Took 49.07 seconds to build instance. 
[ 836.490794] env[68233]: DEBUG nova.compute.manager [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Received event network-vif-plugged-1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 836.491054] env[68233]: DEBUG oslo_concurrency.lockutils [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] Acquiring lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.491307] env[68233]: DEBUG oslo_concurrency.lockutils [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.491567] env[68233]: DEBUG oslo_concurrency.lockutils [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.491753] env[68233]: DEBUG nova.compute.manager [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] No waiting events found dispatching network-vif-plugged-1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 836.493611] env[68233]: WARNING nova.compute.manager [req-67f282da-44d6-4fbf-948d-adef33ee3856 req-8399728b-3b82-48a4-92ab-c36230cefeea service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Received unexpected event network-vif-plugged-1592075d-4a4d-4e79-9eaa-57d357a45e17 for instance with vm_state building and task_state spawning. [ 836.496991] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.9157} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.497279] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. 
[ 836.498296] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2d662e6-5e88-40c5-8ba2-91be4fa48867 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.529230] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Reconfiguring VM instance instance-0000003b to attach disk [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.535116] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2979c341-0ee8-4205-9706-9b92a3ed455b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.554355] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782364, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.555785] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 836.555785] env[68233]: value = "task-2782365" [ 836.555785] env[68233]: _type = "Task" [ 836.555785] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.768090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.768090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.768090] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.845563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.600s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.846199] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 836.850011] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.114s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.851711] env[68233]: INFO nova.compute.claims [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.870915] env[68233]: DEBUG oslo_concurrency.lockutils [None req-85a48852-8795-4344-b6e8-c84a2ee91162 tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.193s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.035523] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782364, 'name': ReconfigVM_Task, 'duration_secs': 0.637924} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.036119] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Reconfigured VM instance instance-0000003d to attach disk [datastore2] 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1/3d94d1b6-ba04-407d-9398-d4f7b21a7ee1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.036501] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5a2f7e0-323d-4c70-8b93-4077947ea176 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.046049] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 837.046049] env[68233]: value = "task-2782366" [ 837.046049] env[68233]: _type = "Task" [ 837.046049] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.056201] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782366, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.068092] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782365, 'name': ReconfigVM_Task, 'duration_secs': 0.433785} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.068380] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Reconfigured VM instance instance-0000003b to attach disk [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 837.069396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9358d7-816b-4bf5-96da-8311acca2d22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.099260] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-751ab0e4-f012-46e8-9290-116df514e207 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.119830] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 837.119830] env[68233]: value = "task-2782367" [ 837.119830] env[68233]: _type = "Task" [ 837.119830] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.129686] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782367, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.320226] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 837.359172] env[68233]: DEBUG nova.compute.utils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 837.361903] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 837.361903] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 837.373022] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 837.448370] env[68233]: DEBUG nova.policy [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '43c48242abf540fe99d95f3d2df541ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'baab6817c97645bcae2e08502b7f96db', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 837.495563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.495735] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.496012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.497038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.497038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 
tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.498631] env[68233]: INFO nova.compute.manager [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Terminating instance [ 837.558018] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782366, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.598031] env[68233]: DEBUG nova.network.neutron [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Updating instance_info_cache with network_info: [{"id": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "address": "fa:16:3e:4c:5a:e3", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1592075d-4a", "ovs_interfaceid": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.631125] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.865534] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 837.897145] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 837.987361] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Successfully created port: 1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.002848] env[68233]: DEBUG nova.compute.manager [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.003153] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.004166] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d121e0-fc98-433e-b9d0-4a38ec0db259 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.017634] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.017902] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c8f50fe-108e-4dba-862f-000e6da7c86c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.032019] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 838.032019] env[68233]: value = "task-2782368" [ 838.032019] env[68233]: _type = "Task" [ 838.032019] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.037502] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782368, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.063617] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782366, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.068031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "da2a5acb-0861-4225-a6b4-324482c480ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.068031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.068031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.068535] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.068535] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.071203] env[68233]: INFO nova.compute.manager [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Terminating instance [ 838.100684] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 838.101194] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 
tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Instance network_info: |[{"id": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "address": "fa:16:3e:4c:5a:e3", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1592075d-4a", "ovs_interfaceid": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 838.102731] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:5a:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1592075d-4a4d-4e79-9eaa-57d357a45e17', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 838.112760] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating folder: Project (1a595904e02240fa8c04e471a0affef3). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.113377] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61f2fbcf-e986-497a-b9b1-a1f0507dd692 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.132339] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782367, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.133883] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Created folder: Project (1a595904e02240fa8c04e471a0affef3) in parent group-v559223. [ 838.134133] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating folder: Instances. 
Parent ref: group-v559389. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 838.134340] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b80d25a6-139e-4ddb-8822-82b7dfba2bf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.155033] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Created folder: Instances in parent group-v559389. [ 838.155305] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 838.155508] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 838.155718] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9e5f1c2-dc17-4a86-be07-7706d7b073c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.181122] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 838.181122] env[68233]: value = "task-2782371" [ 838.181122] env[68233]: _type = "Task" [ 838.181122] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.191121] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782371, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.462835] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2980e8-b38d-47f4-8a90-96e95c837ba2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.472120] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ce2e41-bd8c-45e9-9907-8915aba827c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.507638] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849310bb-970b-4193-98d3-5a508aa11a99 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.516605] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3921ab43-2366-4e88-acbb-6e74cb7bbe15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.531402] env[68233]: DEBUG nova.compute.provider_tree [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.542057] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782368, 'name': PowerOffVM_Task, 'duration_secs': 0.303627} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.542979] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 838.543166] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 838.543855] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f46d8bc4-a2a7-4706-bb3d-8ac4406454f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.558263] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782366, 'name': Rename_Task, 'duration_secs': 1.083326} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.558539] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.558782] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d4783ed2-f57d-4811-ac71-371e8214ca10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.566446] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 838.566446] env[68233]: value = "task-2782373" [ 838.566446] env[68233]: _type = "Task" [ 838.566446] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.575722] env[68233]: DEBUG nova.compute.manager [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 838.575945] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 838.576246] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.576992] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df722a5e-175b-492f-a6e7-e36eec786e34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.586020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 838.586461] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e609deaf-84d8-4e3c-966d-2cda7b95d50b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.594645] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 838.594645] env[68233]: value = "task-2782374" [ 838.594645] env[68233]: _type = "Task" [ 838.594645] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.604168] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782374, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.606340] env[68233]: DEBUG nova.compute.manager [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Received event network-changed-1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 838.606535] env[68233]: DEBUG nova.compute.manager [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Refreshing instance network info cache due to event network-changed-1592075d-4a4d-4e79-9eaa-57d357a45e17. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 838.606755] env[68233]: DEBUG oslo_concurrency.lockutils [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] Acquiring lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.606894] env[68233]: DEBUG oslo_concurrency.lockutils [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] Acquired lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 838.607065] env[68233]: DEBUG nova.network.neutron [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Refreshing network info cache for port 1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 838.632234] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782367, 'name': ReconfigVM_Task, 'duration_secs': 1.034417} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.632234] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.632234] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e238416-e285-49eb-84cf-ced093049699 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.640703] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 838.640703] env[68233]: value = "task-2782375" [ 838.640703] env[68233]: _type = "Task" [ 838.640703] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.645131] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 838.645387] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 838.645633] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleting the datastore file [datastore2] f2af60e6-496c-4edb-9e99-4b45fa94bfeb {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 838.646312] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5cc5ee90-eca1-452f-9446-acaf3cb66512 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.653511] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782375, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.654958] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for the task: (returnval){ [ 838.654958] env[68233]: value = "task-2782376" [ 838.654958] env[68233]: _type = "Task" [ 838.654958] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.669279] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.692233] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782371, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.878325] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 838.904827] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 838.905222] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.905318] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 838.905484] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.905677] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 838.905857] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 838.906102] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 838.906268] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 838.906430] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 838.906751] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 838.906751] env[68233]: DEBUG nova.virt.hardware [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 838.907614] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc501e0-3beb-4e21-a0b4-34830a8580bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.917091] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7384f506-b92b-4cf4-81e7-c0e96a710150 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.037607] env[68233]: DEBUG nova.scheduler.client.report [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 839.079286] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782373, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.105761] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782374, 'name': PowerOffVM_Task, 'duration_secs': 0.244839} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.106156] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.106413] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 839.106715] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20eff53a-a72f-4753-aaca-76b025ce7e61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.154018] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782375, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.165158] env[68233]: DEBUG oslo_vmware.api [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Task: {'id': task-2782376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210063} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.165468] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 839.165669] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 839.165852] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 839.166132] env[68233]: INFO nova.compute.manager [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Took 1.16 seconds to destroy the instance on the hypervisor. [ 839.166792] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 839.166792] env[68233]: DEBUG nova.compute.manager [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 839.166950] env[68233]: DEBUG nova.network.neutron [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 839.193515] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782371, 'name': CreateVM_Task, 'duration_secs': 0.537656} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.193683] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 839.194414] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.194583] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.194965] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 839.195257] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e38d1193-4e76-4be0-84d3-2b3a3e60d39c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.202293] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 839.202293] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e944cc-db5f-d3cf-47cc-a8f74b26f25d" [ 839.202293] env[68233]: _type = "Task" [ 839.202293] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.213344] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e944cc-db5f-d3cf-47cc-a8f74b26f25d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.250438] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 839.250672] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 839.250838] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleting the datastore file [datastore2] da2a5acb-0861-4225-a6b4-324482c480ea {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 839.251230] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99e3b03d-12f4-42e6-8bde-22b039fa1dab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.259408] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for the task: (returnval){ [ 839.259408] env[68233]: value = "task-2782378" [ 839.259408] env[68233]: _type = "Task" [ 839.259408] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.270660] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.543936] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.694s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.544255] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 839.547889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.313s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 839.548091] env[68233]: DEBUG nova.objects.instance [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 839.583231] env[68233]: DEBUG oslo_vmware.api [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782373, 'name': PowerOnVM_Task, 'duration_secs': 0.668784} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.583621] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.584628] env[68233]: INFO nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Took 10.58 seconds to spawn the instance on the hypervisor. [ 839.584628] env[68233]: DEBUG nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.589915] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e055a316-f84b-493c-a9c9-b3930a4e7ba3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.614104] env[68233]: DEBUG nova.network.neutron [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Updated VIF entry in instance network info cache for port 1592075d-4a4d-4e79-9eaa-57d357a45e17. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 839.614592] env[68233]: DEBUG nova.network.neutron [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Updating instance_info_cache with network_info: [{"id": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "address": "fa:16:3e:4c:5a:e3", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1592075d-4a", "ovs_interfaceid": "1592075d-4a4d-4e79-9eaa-57d357a45e17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.652667] env[68233]: DEBUG oslo_vmware.api [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782375, 'name': PowerOnVM_Task, 'duration_secs': 0.546719} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.653075] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.655871] env[68233]: DEBUG nova.compute.manager [None req-3cdb7056-78f2-4817-812a-e916e1df41c7 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.656659] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00803270-dfc0-4945-9f79-181e5232d0c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.714013] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e944cc-db5f-d3cf-47cc-a8f74b26f25d, 'name': SearchDatastore_Task, 'duration_secs': 0.013582} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.714391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.714631] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 839.714866] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.715040] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 839.715244] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 839.715504] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-468f7554-765d-4f84-b7f6-9c23f1cc3577 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.737896] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 839.737896] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 839.738765] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b50319bb-a0d6-47d6-9fa5-b1d3cc9f5de6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.746370] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 839.746370] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f40de2-3360-3dcf-7c20-fd0da5e43286" [ 839.746370] env[68233]: _type = "Task" [ 839.746370] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.759798] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f40de2-3360-3dcf-7c20-fd0da5e43286, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.768624] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782378, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.043525] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Successfully updated port: 1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 840.060718] env[68233]: DEBUG nova.compute.utils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 840.060718] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 840.060718] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 840.112106] env[68233]: INFO nova.compute.manager [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Took 46.18 seconds to build instance. 
[ 840.117921] env[68233]: DEBUG oslo_concurrency.lockutils [req-0d8d8051-d04f-4a95-b116-e4b5ad52303e req-01668582-86b7-4d6f-b90a-40c16c4cf382 service nova] Releasing lock "refresh_cache-4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.160230] env[68233]: DEBUG nova.policy [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fad204b45934ee18bdf27783c32f4a3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a595904e02240fa8c04e471a0affef3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 840.259173] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f40de2-3360-3dcf-7c20-fd0da5e43286, 'name': SearchDatastore_Task, 'duration_secs': 0.023605} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.260043] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ae2d5d0-97a6-485c-b16c-6a8d6f6ae419 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.262513] env[68233]: DEBUG nova.network.neutron [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.280615] env[68233]: DEBUG oslo_vmware.api [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Task: {'id': task-2782378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.710524} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.280852] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 840.280852] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5225cafa-bf40-46c9-9006-598eab5e94b4" [ 840.280852] env[68233]: _type = "Task" [ 840.280852] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.281883] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 840.282160] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 840.282405] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 840.282779] env[68233]: INFO nova.compute.manager [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Took 1.71 seconds to destroy the instance on the hypervisor. [ 840.283152] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 840.283723] env[68233]: DEBUG nova.compute.manager [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 840.283882] env[68233]: DEBUG nova.network.neutron [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 840.295829] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5225cafa-bf40-46c9-9006-598eab5e94b4, 'name': SearchDatastore_Task, 'duration_secs': 0.012347} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.296287] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 840.296452] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98/4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 840.296744] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cecf5cb-1a55-460e-ba92-c7619d2ffa32 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.306331] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 840.306331] env[68233]: value = "task-2782379" [ 840.306331] env[68233]: _type = "Task" [ 840.306331] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.316395] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782379, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.548662] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.548662] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.548662] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 840.559515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-49fbf79c-5046-4982-9ca4-8c36f2f6741b tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.560923] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.900s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.561219] env[68233]: DEBUG nova.objects.instance [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lazy-loading 'resources' on Instance uuid 65f9fe09-97dc-4988-bae4-243d60e33be9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.571486] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 840.615047] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eafc043-96cf-492d-a9a9-5862a6d88190 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.998s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.673208] env[68233]: DEBUG nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Received event network-vif-deleted-84572635-d33f-44cd-8a87-a9af1019bf50 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.673433] env[68233]: DEBUG nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Received event network-vif-plugged-1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.673629] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Acquiring lock "03688e90-5433-47ca-baaa-75861ad093b7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.673835] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Lock "03688e90-5433-47ca-baaa-75861ad093b7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.675649] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Lock "03688e90-5433-47ca-baaa-75861ad093b7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.676406] env[68233]: DEBUG nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] No waiting events found dispatching network-vif-plugged-1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 840.676948] env[68233]: WARNING nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Received unexpected event network-vif-plugged-1d7d2812-9e77-463b-be77-4b04e6365498 for instance with vm_state building and task_state spawning. 
[ 840.677381] env[68233]: DEBUG nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Received event network-changed-1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 840.677578] env[68233]: DEBUG nova.compute.manager [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Refreshing instance network info cache due to event network-changed-1d7d2812-9e77-463b-be77-4b04e6365498. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 840.677927] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Acquiring lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.765260] env[68233]: INFO nova.compute.manager [-] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Took 1.60 seconds to deallocate network for instance. [ 840.822709] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782379, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.891510] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Successfully created port: d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.120305] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.134966] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 841.272575] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.319833] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782379, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.586423] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 841.592088] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.592621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.592937] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.593250] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.593583] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.595930] env[68233]: INFO nova.compute.manager [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Terminating instance [ 841.614580] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 841.614804] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 841.614963] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 841.615150] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 841.615952] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 841.615952] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 841.615952] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 841.615952] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 841.615952] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 841.616759] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
841.617023] env[68233]: DEBUG nova.virt.hardware [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 841.617859] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff2ded6-6453-4364-bbc5-4ba46878be3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.635422] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b227aa6-d317-4f1d-ba2b-a0083231cdfb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.653405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.663956] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe3c351-e0f4-46a2-9d5b-a5057b0d9278 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.675739] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00feb13a-b0af-4ceb-b3a6-79398ac42192 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.713332] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c378533-a983-4022-b145-cf318627d72c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.720716] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eea8d2-99ab-4ac0-99df-5d1c055b3711 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.737016] env[68233]: DEBUG nova.compute.provider_tree [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 841.781707] env[68233]: DEBUG nova.network.neutron [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Updating instance_info_cache with network_info: [{"id": "1d7d2812-9e77-463b-be77-4b04e6365498", "address": "fa:16:3e:0b:d3:a8", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d7d2812-9e", "ovs_interfaceid": "1d7d2812-9e77-463b-be77-4b04e6365498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.819195] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782379, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.107022] env[68233]: DEBUG nova.compute.manager [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 842.107022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 842.107022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed17f143-61b5-4bbc-a61d-e21a6f68990f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.116689] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 842.116902] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-505e07a8-453a-428d-8f54-4710a4bf2b60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.125148] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 842.125148] env[68233]: value = "task-2782380" [ 842.125148] env[68233]: _type = "Task" [ 842.125148] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.138869] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782380, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.170591] env[68233]: DEBUG nova.network.neutron [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.242387] env[68233]: DEBUG nova.scheduler.client.report [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 842.284961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.285325] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Instance network_info: |[{"id": "1d7d2812-9e77-463b-be77-4b04e6365498", "address": "fa:16:3e:0b:d3:a8", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d7d2812-9e", "ovs_interfaceid": "1d7d2812-9e77-463b-be77-4b04e6365498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 842.285629] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Acquired lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.285807] env[68233]: DEBUG nova.network.neutron [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Refreshing 
network info cache for port 1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 842.287082] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:d3:a8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d7d2812-9e77-463b-be77-4b04e6365498', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 842.304689] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 842.308057] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 842.308391] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c70be2c-f5f9-4c10-9b7a-7e6f215d6906 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.336612] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782379, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.570682} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.338083] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98/4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 842.338337] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 842.338916] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 842.338916] env[68233]: value = "task-2782381" [ 842.338916] env[68233]: _type = "Task" [ 842.338916] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.339140] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4ded234-34a8-42df-ae33-5622d026606d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.353035] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782381, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.353035] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 842.353035] env[68233]: value = "task-2782382" [ 842.353035] env[68233]: _type = "Task" [ 842.353035] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.361470] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782382, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.577516] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.577765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.637238] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782380, 'name': PowerOffVM_Task, 'duration_secs': 0.330466} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.637238] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 842.637238] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 842.637675] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17540885-51d1-47d0-bb00-c6f1a9f2971f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.678147] env[68233]: INFO nova.compute.manager [-] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Took 2.39 seconds to deallocate network for instance. [ 842.701851] env[68233]: DEBUG nova.network.neutron [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Updated VIF entry in instance network info cache for port 1d7d2812-9e77-463b-be77-4b04e6365498. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 842.703176] env[68233]: DEBUG nova.network.neutron [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Updating instance_info_cache with network_info: [{"id": "1d7d2812-9e77-463b-be77-4b04e6365498", "address": "fa:16:3e:0b:d3:a8", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d7d2812-9e", "ovs_interfaceid": "1d7d2812-9e77-463b-be77-4b04e6365498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.730555] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 842.730778] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 842.731025] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 842.731357] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d9feba8-69fa-4fa3-b433-a77ffbe2e6e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.740577] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 842.740577] env[68233]: value = "task-2782384" [ 842.740577] env[68233]: _type = "Task" [ 842.740577] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.751364] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.190s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.753130] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.757019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.956s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.757019] env[68233]: DEBUG nova.objects.instance [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lazy-loading 'resources' on Instance uuid 09e4644d-d845-47f4-8748-925f739863b9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 842.780479] env[68233]: INFO nova.scheduler.client.report [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Deleted allocations for instance 65f9fe09-97dc-4988-bae4-243d60e33be9 [ 842.853800] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782381, 'name': CreateVM_Task, 'duration_secs': 0.41721} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.857961] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.857961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.857961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.858164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 842.858774] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45014584-6357-4dbf-8f15-93f94c6ae79c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.868261] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782382, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082999} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.869741] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 842.870111] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 842.870111] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f37f9a-9d6b-ce4d-4ce3-a784633f699f" [ 842.870111] env[68233]: _type = "Task" [ 842.870111] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.870936] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93a2b49-e5c8-45d9-af3b-4c8553020a9c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.900026] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Reconfiguring VM instance instance-0000003e to attach disk [datastore2] 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98/4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 842.905058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9422759d-924b-49d1-800e-009ca98bb619 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.921310] env[68233]: DEBUG nova.compute.manager [req-0032ad6b-2656-4fd0-a92e-823e6ce24cce req-4b572f3d-5eee-4be9-a730-a1fdea7f7dab service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-vif-deleted-25a5eb6c-78b9-4400-86e5-a52be08b80e4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 842.921310] env[68233]: DEBUG nova.compute.manager [req-0032ad6b-2656-4fd0-a92e-823e6ce24cce req-4b572f3d-5eee-4be9-a730-a1fdea7f7dab service nova] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Received event network-vif-deleted-c35d0002-3d03-4b47-bc5d-c4f180e895e8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 842.927192] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f37f9a-9d6b-ce4d-4ce3-a784633f699f, 'name': SearchDatastore_Task, 'duration_secs': 0.014222} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.927192] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.927192] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.927192] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.927312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 842.927481] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.928607] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.928858] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.929069] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a74afd2-8163-41e0-a903-fae819ced7d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.937911] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 842.937911] 
env[68233]: value = "task-2782385" [ 842.937911] env[68233]: _type = "Task" [ 842.937911] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.940738] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.941693] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.941796] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a828046-1d7c-442c-be09-263b902bec94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.947260] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782385, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.951029] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 842.951029] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241c5af-3e4d-1b97-f728-ac85b539d276" [ 842.951029] env[68233]: _type = "Task" [ 842.951029] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.960016] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241c5af-3e4d-1b97-f728-ac85b539d276, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.185464] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.188957] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Successfully updated port: d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.205052] env[68233]: DEBUG oslo_concurrency.lockutils [req-3bb558ae-7117-4ccc-9991-009c3d6a1e47 req-222438bf-d162-4fb9-85d4-4d0dcf8e65d7 service nova] Releasing lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.251880] env[68233]: DEBUG oslo_vmware.api [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.336745} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.252613] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 843.252613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 843.252613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 843.252613] env[68233]: INFO nova.compute.manager [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Took 1.15 seconds to destroy the instance on the hypervisor. [ 843.252905] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 843.253108] env[68233]: DEBUG nova.compute.manager [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 843.253212] env[68233]: DEBUG nova.network.neutron [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 843.287290] env[68233]: DEBUG oslo_concurrency.lockutils [None req-616bb65d-8c33-406e-bdcd-7419a6c72b21 tempest-ServersAaction247Test-1465870177 tempest-ServersAaction247Test-1465870177-project-member] Lock "65f9fe09-97dc-4988-bae4-243d60e33be9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.962s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.448437] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.462054] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241c5af-3e4d-1b97-f728-ac85b539d276, 'name': SearchDatastore_Task, 'duration_secs': 0.013174} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.466566] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c75de96-a79d-4757-bfae-791bb3d8bc4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.471995] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 843.471995] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c9af9f-bfd7-bede-a32e-55a7f2fd79fc" [ 843.471995] env[68233]: _type = "Task" [ 843.471995] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.479956] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c9af9f-bfd7-bede-a32e-55a7f2fd79fc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.691024] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.691188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.691224] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.757462] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd98763-6c4d-4a54-bcb3-3445ce8ee432 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.767377] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b03f8eb-9c09-480a-b413-06ab3e67951e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.799590] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b876900a-417c-4159-9d03-bc927fb324e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.807922] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcee9302-8deb-4305-8fa8-0547f081a577 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.822751] env[68233]: DEBUG nova.compute.provider_tree [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.948424] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782385, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.982440] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c9af9f-bfd7-bede-a32e-55a7f2fd79fc, 'name': SearchDatastore_Task, 'duration_secs': 0.022575} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.982702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.982957] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 03688e90-5433-47ca-baaa-75861ad093b7/03688e90-5433-47ca-baaa-75861ad093b7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.983261] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a31388a-9834-496d-8b4b-946f9c92af0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.989971] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 843.989971] env[68233]: value = "task-2782386" [ 843.989971] env[68233]: _type = "Task" [ 843.989971] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.998093] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.121965] env[68233]: DEBUG nova.network.neutron [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.229169] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 844.326383] env[68233]: DEBUG nova.scheduler.client.report [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 844.375041] env[68233]: DEBUG nova.network.neutron [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Updating instance_info_cache with network_info: [{"id": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "address": "fa:16:3e:c7:29:02", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e53ccd-39", "ovs_interfaceid": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.449598] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782385, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.499525] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.624944] env[68233]: INFO nova.compute.manager [-] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Took 1.37 seconds to deallocate network for instance. 
[ 844.833809] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.080s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.836880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.391s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.838450] env[68233]: INFO nova.compute.claims [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.854811] env[68233]: INFO nova.scheduler.client.report [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Deleted allocations for instance 09e4644d-d845-47f4-8748-925f739863b9 [ 844.878466] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.879212] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Instance network_info: |[{"id": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "address": "fa:16:3e:c7:29:02", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e53ccd-39", "ovs_interfaceid": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.879679] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:29:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56b944d8-803d-43f2-945d-0f334ee4ea1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd6e53ccd-3915-433a-a12d-2a02f08a9ab2', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.893545] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.893545] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.893545] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87168109-f077-47c2-bfee-3bf4a47fe0e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.918052] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.918052] env[68233]: value = "task-2782387" [ 844.918052] env[68233]: _type = "Task" [ 844.918052] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.928892] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782387, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.950183] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782385, 'name': ReconfigVM_Task, 'duration_secs': 1.69233} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.950578] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Reconfigured VM instance instance-0000003e to attach disk [datastore2] 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98/4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.951378] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c38881e5-08cd-45fc-b91a-bdd5ad72489e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.959219] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 844.959219] env[68233]: value = "task-2782388" [ 844.959219] env[68233]: _type = "Task" [ 844.959219] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.968318] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782388, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.977321] env[68233]: DEBUG nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Received event network-vif-plugged-d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 844.977321] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Acquiring lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.977321] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.977481] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.977535] env[68233]: DEBUG nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] No waiting events found dispatching network-vif-plugged-d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 844.978371] env[68233]: WARNING nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Received unexpected event network-vif-plugged-d6e53ccd-3915-433a-a12d-2a02f08a9ab2 for instance with vm_state building and task_state spawning. [ 844.978627] env[68233]: DEBUG nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Received event network-changed-d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 844.978804] env[68233]: DEBUG nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Refreshing instance network info cache due to event network-changed-d6e53ccd-3915-433a-a12d-2a02f08a9ab2. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 844.979032] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Acquiring lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.979186] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Acquired lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.979349] env[68233]: DEBUG nova.network.neutron [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Refreshing network info cache for port d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 845.001455] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782386, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.131303] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.362936] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a1daf0f9-a55d-42c0-95b0-1700c025b41e tempest-ServerMetadataTestJSON-345786485 tempest-ServerMetadataTestJSON-345786485-project-member] Lock "09e4644d-d845-47f4-8748-925f739863b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.108s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 845.428619] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782387, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.470103] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782388, 'name': Rename_Task, 'duration_secs': 0.260364} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.472259] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 845.472259] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be15378a-b7e3-47a8-9aae-05a672ec8756 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.478826] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 845.478826] env[68233]: value = "task-2782389" [ 845.478826] env[68233]: _type = "Task" [ 845.478826] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.488393] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782389, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.503522] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782386, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.050397} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.503791] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 03688e90-5433-47ca-baaa-75861ad093b7/03688e90-5433-47ca-baaa-75861ad093b7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 845.503791] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 845.504057] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4c5f15b-8666-4f80-8fa2-07f72e1f927b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.514280] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 845.514280] env[68233]: value = "task-2782390" [ 845.514280] env[68233]: _type = "Task" [ 845.514280] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.521800] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.850229] env[68233]: DEBUG nova.network.neutron [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Updated VIF entry in instance network info cache for port d6e53ccd-3915-433a-a12d-2a02f08a9ab2. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.850576] env[68233]: DEBUG nova.network.neutron [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Updating instance_info_cache with network_info: [{"id": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "address": "fa:16:3e:c7:29:02", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd6e53ccd-39", "ovs_interfaceid": "d6e53ccd-3915-433a-a12d-2a02f08a9ab2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.931414] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782387, 'name': CreateVM_Task, 'duration_secs': 0.852539} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.931659] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.932401] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.932625] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.932977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.933306] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21c8e0a0-6e2e-4141-a8b5-406b69326a06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.938707] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 845.938707] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52200f3c-fe27-cb23-4b55-b4518eb1c7aa" [ 845.938707] env[68233]: _type = "Task" [ 845.938707] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.949853] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52200f3c-fe27-cb23-4b55-b4518eb1c7aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.990570] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782389, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.022043] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077057} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.024525] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 846.028246] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfbbc2e-9884-4a30-bab4-0e8f27e0506b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.048369] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Reconfiguring VM instance instance-0000003f to attach disk [datastore2] 03688e90-5433-47ca-baaa-75861ad093b7/03688e90-5433-47ca-baaa-75861ad093b7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 846.051227] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed955610-30b2-4b4a-aa2d-538fac2511fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.074791] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 846.074791] env[68233]: value = "task-2782391" [ 846.074791] env[68233]: _type = "Task" [ 846.074791] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.087764] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782391, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.355267] env[68233]: DEBUG oslo_concurrency.lockutils [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] Releasing lock "refresh_cache-8880bb83-56f1-4ad2-9d6d-1885826aed21" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.355563] env[68233]: DEBUG nova.compute.manager [req-b74a8422-91e5-4809-884e-f50f078c1a89 req-eb128114-91f7-456c-9a65-a2cdc8241e7d service nova] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Received event network-vif-deleted-6aca81ac-c770-4892-bc3b-9d9096c8656e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 846.419837] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1045a765-0157-40ec-bc1d-81fe30444fe4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.428088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15f7752-d33b-4302-82b4-cf8089bd8e6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.465595] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea2b9d2-cd0b-4b04-b56d-0591ad09050c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.477517] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abdbadf2-e223-46cf-b83e-abd5a3631994 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.481210] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52200f3c-fe27-cb23-4b55-b4518eb1c7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.067464} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.481210] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.481634] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.481676] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.481806] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.482080] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.485172] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a35ea3c-b7fa-47bb-8aa9-60389a8502bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.495440] env[68233]: DEBUG nova.compute.provider_tree [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.500655] env[68233]: DEBUG oslo_vmware.api [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782389, 'name': PowerOnVM_Task, 'duration_secs': 0.930771} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.500655] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.500795] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.501827] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 846.502069] env[68233]: INFO nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 12.50 seconds to spawn the instance on the hypervisor. [ 846.502259] env[68233]: DEBUG nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 846.502496] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cab6d43-29cb-45d2-8640-d093e5e9a10c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.505972] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94937756-f035-42af-a773-b2d310801ef0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.517728] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 846.517728] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1fc31-72dc-a9a9-589f-e3ab6923cd42" [ 846.517728] env[68233]: _type = "Task" [ 846.517728] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.527512] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1fc31-72dc-a9a9-589f-e3ab6923cd42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.585112] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782391, 'name': ReconfigVM_Task, 'duration_secs': 0.331791} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.585424] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Reconfigured VM instance instance-0000003f to attach disk [datastore2] 03688e90-5433-47ca-baaa-75861ad093b7/03688e90-5433-47ca-baaa-75861ad093b7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 846.586137] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2158dc86-ff30-4766-af56-763d316106c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.593176] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 846.593176] env[68233]: value = "task-2782392" [ 846.593176] env[68233]: _type = "Task" [ 846.593176] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.603932] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782392, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.005018] env[68233]: DEBUG nova.scheduler.client.report [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 847.040621] env[68233]: INFO nova.compute.manager [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 52.13 seconds to build instance. [ 847.042188] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f1fc31-72dc-a9a9-589f-e3ab6923cd42, 'name': SearchDatastore_Task, 'duration_secs': 0.013424} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.045734] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-927a3c0a-f2f8-4f75-a523-de78605e3022 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.051992] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 847.051992] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520c01e8-ce90-f35e-8194-0b239e68d86b" [ 847.051992] env[68233]: _type = "Task" [ 847.051992] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.065309] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520c01e8-ce90-f35e-8194-0b239e68d86b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.107716] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782392, 'name': Rename_Task, 'duration_secs': 0.161435} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.108045] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 847.108650] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a24b5395-65c2-47e5-a0b7-37e2ce555f66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.116179] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 847.116179] env[68233]: value = "task-2782393" [ 847.116179] env[68233]: _type = "Task" [ 847.116179] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.124796] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782393, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.508615] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.509271] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 847.511888] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.694s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.512139] env[68233]: DEBUG nova.objects.instance [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lazy-loading 'resources' on Instance uuid 175ced9c-52f6-4577-a010-8fffc2876e6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 847.543933] env[68233]: DEBUG oslo_concurrency.lockutils [None req-aca1c720-2f0d-4e84-9306-109ff998c4bd tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.155s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.565492] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520c01e8-ce90-f35e-8194-0b239e68d86b, 'name': SearchDatastore_Task, 'duration_secs': 0.01291} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.565766] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.566036] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 8880bb83-56f1-4ad2-9d6d-1885826aed21/8880bb83-56f1-4ad2-9d6d-1885826aed21.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.566537] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ae20aee-7a44-41b9-8d83-1af36238efbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.575177] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 847.575177] env[68233]: value = "task-2782394" [ 847.575177] env[68233]: _type = "Task" [ 847.575177] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.584835] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782394, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.626138] env[68233]: DEBUG oslo_vmware.api [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782393, 'name': PowerOnVM_Task, 'duration_secs': 0.502059} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.626422] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.626622] env[68233]: INFO nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 847.626811] env[68233]: DEBUG nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 847.627618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01a8c9c-5551-46d4-95f0-d4c86f00227f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.016155] env[68233]: DEBUG nova.compute.utils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 848.021368] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 848.021565] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 848.047750] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 848.086556] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782394, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.148116] env[68233]: DEBUG nova.policy [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '839b33e7aa11482882403ddc2319583f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '853a057cfba3400ba05c89cb1d292f61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 848.150465] env[68233]: INFO nova.compute.manager [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Took 48.94 seconds to build instance. 
[ 848.522484] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 848.577053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.595698] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782394, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52694} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.596293] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 8880bb83-56f1-4ad2-9d6d-1885826aed21/8880bb83-56f1-4ad2-9d6d-1885826aed21.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.596293] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.596494] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a800f1e3-481e-47e8-8dd8-5b3db621db04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.606240] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 848.606240] env[68233]: value = "task-2782395" [ 848.606240] env[68233]: _type = "Task" [ 848.606240] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.618758] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782395, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.630778] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Successfully created port: 99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.642247] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4687a377-2850-4ca7-99cf-075225b1cdcb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.651989] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826c3b3e-0d4d-416c-a40c-d3d26f2b1ab5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.655650] env[68233]: DEBUG oslo_concurrency.lockutils [None req-42a674a5-0444-4afb-b544-9dc61efe7619 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.931s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.690713] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09b5bbe-ed23-45a3-8dfb-2c97850040c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.699619] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fb2237-470c-4d17-908f-f40a8d66722a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.718019] env[68233]: DEBUG nova.compute.provider_tree [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.723342] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Acquiring lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.723388] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Acquired lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.723560] env[68233]: DEBUG nova.network.neutron [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 849.122888] env[68233]: 
DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782395, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184834} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.123302] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.124127] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ed662d-57a1-4ee7-bbae-f93ed7ca08c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.150838] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 8880bb83-56f1-4ad2-9d6d-1885826aed21/8880bb83-56f1-4ad2-9d6d-1885826aed21.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.152587] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a063abea-a0cf-4185-a05d-6db65060ab01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.166342] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.176294] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 849.176294] env[68233]: value = "task-2782396" [ 849.176294] env[68233]: _type = "Task" [ 849.176294] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.187212] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782396, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.221017] env[68233]: DEBUG nova.scheduler.client.report [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.536341] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 849.577963] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 849.577963] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.577963] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.577963] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.577963] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 849.578206] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda 
tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 849.578374] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 849.579026] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 849.579026] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 849.579026] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 849.579276] env[68233]: DEBUG nova.virt.hardware [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 849.580143] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4438bbb7-caf3-4abd-842a-1f548cd0e07a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.590859] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624a02f0-49a6-444f-8b73-12f96f53b37d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.688738] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782396, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.694650] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.731603] env[68233]: DEBUG nova.network.neutron [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Updating instance_info_cache with network_info: [{"id": "1d7d2812-9e77-463b-be77-4b04e6365498", "address": "fa:16:3e:0b:d3:a8", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d7d2812-9e", "ovs_interfaceid": "1d7d2812-9e77-463b-be77-4b04e6365498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.736913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.222s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.736913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.341s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.736913] env[68233]: DEBUG nova.objects.instance [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lazy-loading 'resources' on Instance uuid b5e9ef73-2203-42b4-bee0-76d439ffaa17 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.757934] env[68233]: INFO nova.scheduler.client.report [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Deleted allocations for instance 
175ced9c-52f6-4577-a010-8fffc2876e6a [ 850.199156] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782396, 'name': ReconfigVM_Task, 'duration_secs': 0.956609} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.199868] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 8880bb83-56f1-4ad2-9d6d-1885826aed21/8880bb83-56f1-4ad2-9d6d-1885826aed21.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.200277] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-71bd9a8e-22af-4deb-a9c0-394bbf8ae37b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.214277] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 850.214277] env[68233]: value = "task-2782397" [ 850.214277] env[68233]: _type = "Task" [ 850.214277] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.230151] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782397, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.237331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Releasing lock "refresh_cache-03688e90-5433-47ca-baaa-75861ad093b7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.237441] env[68233]: DEBUG nova.compute.manager [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Inject network info {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 850.237747] env[68233]: DEBUG nova.compute.manager [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] network_info to inject: |[{"id": "1d7d2812-9e77-463b-be77-4b04e6365498", "address": "fa:16:3e:0b:d3:a8", "network": {"id": "e992eb30-1a74-4062-9c30-b0285cf6cee7", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-785072446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "baab6817c97645bcae2e08502b7f96db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1316f5aa-529f-4bac-8dd7-6076a9d43312", "external-id": "nsx-vlan-transportzone-399", "segmentation_id": 399, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d7d2812-9e", "ovs_interfaceid": "1d7d2812-9e77-463b-be77-4b04e6365498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 850.244407] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Reconfiguring VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 850.247545] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ccff243-fd2d-4a57-945d-c97816ce0731 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.266772] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87adf30f-4b94-4ec7-b820-e6a6a5f1569a tempest-ServerShowV257Test-61235335 tempest-ServerShowV257Test-61235335-project-member] Lock "175ced9c-52f6-4577-a010-8fffc2876e6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.729s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.272431] env[68233]: DEBUG oslo_vmware.api [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd 
tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Waiting for the task: (returnval){ [ 850.272431] env[68233]: value = "task-2782398" [ 850.272431] env[68233]: _type = "Task" [ 850.272431] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.291726] env[68233]: DEBUG oslo_vmware.api [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Task: {'id': task-2782398, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.396855] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Successfully updated port: 99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.599499] env[68233]: DEBUG nova.compute.manager [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Received event network-vif-plugged-99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 850.599916] env[68233]: DEBUG oslo_concurrency.lockutils [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] Acquiring lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.599916] env[68233]: DEBUG oslo_concurrency.lockutils [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.600104] env[68233]: DEBUG oslo_concurrency.lockutils [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 850.600271] env[68233]: DEBUG nova.compute.manager [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] No waiting events found dispatching network-vif-plugged-99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 850.600437] env[68233]: WARNING nova.compute.manager [req-8b06fa08-ea54-45dd-b0c9-f047695ec358 req-3eef1661-d837-4732-8a75-e5ab0a9a5160 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Received unexpected event network-vif-plugged-99d9f200-1c6c-4ab7-9482-aa557fad7711 for instance with vm_state building and task_state spawning. 
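The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: a vim method is invoked to start a vCenter task, and the session's wait_for_task() then polls it, producing the "progress is N%" lines until the task completes or raises. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession named session and hypothetical vm_ref/config_spec objects (neither is defined here); this illustrates the polling loop visible in the log, not the driver's actual vmops/volumeops code:

from oslo_vmware import exceptions as vexc

def reconfigure_vm(session, vm_ref, config_spec):
    # Starts the vCenter task; corresponds to the
    # "Invoking VirtualMachine.ReconfigVM_Task" entries above.
    task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                              vm_ref, spec=config_spec)
    try:
        # Polls until done; each poll is one "progress is N%" log entry,
        # and success is the "completed successfully" entry with duration_secs.
        return session.wait_for_task(task)
    except vexc.VimFaultException:
        # vCenter reported the task as failed; surface it to the caller.
        raise
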
[ 850.731928] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782397, 'name': Rename_Task, 'duration_secs': 0.182737} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.732425] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.732715] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e509c15-73c0-48c0-a111-2d418cba4ff1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.742021] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 850.742021] env[68233]: value = "task-2782399" [ 850.742021] env[68233]: _type = "Task" [ 850.742021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.751851] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782399, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.783730] env[68233]: DEBUG oslo_vmware.api [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] Task: {'id': task-2782398, 'name': ReconfigVM_Task, 'duration_secs': 0.222235} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.786553] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d6b985e2-1e78-4a4c-8eb2-94a5cf03ebfd tempest-ServersAdminTestJSON-1737428178 tempest-ServersAdminTestJSON-1737428178-project-admin] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Reconfigured VM instance to set the machine id {{(pid=68233) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 850.903622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.903622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.903622] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.904659] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66477997-8803-4486-9577-01b827f2ecf1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.913632] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d73bbf6-9b8e-4cd6-aba7-26bfdcf33fec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.947946] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bad110-cfd7-4cb0-a7c1-187f10aadaa3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.957956] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0012c041-4a66-4df4-a9cc-c7ad8a82a512 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.978381] env[68233]: DEBUG nova.compute.provider_tree [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.253133] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782399, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.440892] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.450133] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.450387] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.482884] env[68233]: DEBUG nova.scheduler.client.report [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.718145] env[68233]: DEBUG nova.network.neutron [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Updating instance_info_cache with network_info: [{"id": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "address": "fa:16:3e:07:51:b3", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d9f200-1c", "ovs_interfaceid": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.754354] env[68233]: DEBUG oslo_vmware.api [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782399, 'name': PowerOnVM_Task, 'duration_secs': 0.972592} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.754354] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.754354] env[68233]: INFO nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Took 10.17 seconds to spawn the instance on the hypervisor. [ 851.754854] env[68233]: DEBUG nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.755966] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30534b9-e522-43ae-9722-545625e49d3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.991094] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.252s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.992073] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 37.895s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.026254] env[68233]: INFO nova.scheduler.client.report [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleted allocations for instance b5e9ef73-2203-42b4-bee0-76d439ffaa17 [ 852.223021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.223021] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Instance 
network_info: |[{"id": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "address": "fa:16:3e:07:51:b3", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d9f200-1c", "ovs_interfaceid": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 852.223021] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:51:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99d9f200-1c6c-4ab7-9482-aa557fad7711', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 852.230432] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 852.230902] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 852.231347] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-06910326-876c-4082-8d0d-b2d59c614ee2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.263851] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.263851] env[68233]: value = "task-2782400" [ 852.263851] env[68233]: _type = "Task" [ 852.263851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.276361] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782400, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.279591] env[68233]: INFO nova.compute.manager [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Took 51.57 seconds to build instance. [ 852.535227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2dcc20d1-f6d7-4368-ba1b-50dbaa550ac2 tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "b5e9ef73-2203-42b4-bee0-76d439ffaa17" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.117s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.776208] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782400, 'name': CreateVM_Task, 'duration_secs': 0.384434} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.776463] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.777104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.777282] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.777601] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 852.777874] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-569e032c-8898-4b1f-8184-d2f96209f24b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.783218] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7dcb343e-93a3-4015-911b-fba4693ceb08 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.048s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.783530] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 852.783530] env[68233]: value = 
"session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e568d-21aa-1d58-286c-466113c467e8" [ 852.783530] env[68233]: _type = "Task" [ 852.783530] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.793597] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e568d-21aa-1d58-286c-466113c467e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.822830] env[68233]: DEBUG nova.compute.manager [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Received event network-changed-99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 852.822978] env[68233]: DEBUG nova.compute.manager [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Refreshing instance network info cache due to event network-changed-99d9f200-1c6c-4ab7-9482-aa557fad7711. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 852.823274] env[68233]: DEBUG oslo_concurrency.lockutils [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] Acquiring lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.823431] env[68233]: DEBUG oslo_concurrency.lockutils [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] Acquired lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.823602] env[68233]: DEBUG nova.network.neutron [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Refreshing network info cache for port 99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.013428] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Applying migration context for instance 72467d49-6fa8-42db-871e-4e50e77eedf7 as it has an incoming, in-progress migration c46ea15e-6075-47b3-b44f-d79f032a7b76. Migration status is reverting {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 853.015760] env[68233]: INFO nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating resource usage from migration c46ea15e-6075-47b3-b44f-d79f032a7b76 [ 853.040225] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.040442] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 87385201-3118-4a8e-9739-db3b431566c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.040610] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.040768] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2812bf7c-5117-4fd9-9330-0cc94277bf5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.040930] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c6a358b7-0e6a-43bb-a171-5e6175f947bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.041144] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.041304] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 6ceb7d2d-143a-464a-aca5-6b6838630bb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.041474] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f2af60e6-496c-4edb-9e99-4b45fa94bfeb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 853.041619] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f7a1bfc5-7141-4764-b3fe-08d06020209a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.041758] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 876d428d-d5c9-422a-aba2-2d6c61b092db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.041899] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance abdf9de2-8563-4a31-91a3-0c18b0387533 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.042050] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.042209] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 13972b73-8bae-4a2a-a987-b6177381e7c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.042391] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 990e1a66-f2ab-4925-b1da-58cdc41a6315 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.042557] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 48270554-abe4-4f72-b8b9-5f2de6a9ed26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.042693] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 28af332b-4f9b-4474-afdc-ab17e92df6e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 853.042816] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 64b8997c-3246-4c97-a6c9-3a6a23645d38 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
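The resource-tracker pass above compares the instances this host actively manages (plus the in-progress migration) against the allocations placement holds for provider 51aa13e7-0977-4031-b209-4ae90c83752c: managed instances and the active migration keep their allocations, instances the host does not know about are logged with "Skipping heal of allocation because we do not know what to do", and freshly scheduled instances are skipped until they start. A rough, self-contained sketch of that classification, using plain dicts and sets as stand-ins for Nova's instance, migration and allocation objects (the helper name classify_allocations is made up for illustration):

def classify_allocations(managed_uuids, scheduled_uuids, migration_uuids,
                         placement_allocations):
    # Mirrors the per-allocation DEBUG/WARNING lines emitted in the log above.
    decisions = {}
    for uuid, resources in placement_allocations.items():
        if uuid in managed_uuids:
            decisions[uuid] = ('keep', 'actively managed on this compute host')
        elif uuid in migration_uuids:
            decisions[uuid] = ('keep', 'migration is active on this compute host')
        elif uuid in scheduled_uuids:
            decisions[uuid] = ('skip', 'scheduled here but not yet started')
        else:
            decisions[uuid] = ('warn', 'not actively managed; skipping heal')
    return decisions

# Example mirroring two of the entries above.
allocs = {
    '8880bb83-56f1-4ad2-9d6d-1885826aed21': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
    'f2af60e6-496c-4edb-9e99-4b45fa94bfeb': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
}
print(classify_allocations(
    managed_uuids={'8880bb83-56f1-4ad2-9d6d-1885826aed21'},
    scheduled_uuids=set(),
    migration_uuids=set(),
    placement_allocations=allocs))
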
[ 853.042971] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 07c7d125-d689-4499-aa4a-b9d3441c6fd0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 853.043138] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.043295] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance da2a5acb-0861-4225-a6b4-324482c480ea is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 853.043448] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 853.043603] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.043742] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 8880bb83-56f1-4ad2-9d6d-1885826aed21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.043882] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 03688e90-5433-47ca-baaa-75861ad093b7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.044034] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c5b42243-878f-4150-a5d3-63d69e474bd1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.044183] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Migration c46ea15e-6075-47b3-b44f-d79f032a7b76 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 853.044320] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 72467d49-6fa8-42db-871e-4e50e77eedf7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 853.291465] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 853.307542] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e568d-21aa-1d58-286c-466113c467e8, 'name': SearchDatastore_Task, 'duration_secs': 0.018551} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.307976] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.308988] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.309086] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.309526] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.309764] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.310355] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07b1096d-9635-41cc-b51c-b91ec14c7584 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
853.329096] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.329449] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.331063] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3cbd8ed-e596-489b-8948-774411a40ce5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.338728] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 853.338728] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52acb0e7-3486-7990-ab5c-ecc81906f26d" [ 853.338728] env[68233]: _type = "Task" [ 853.338728] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.348953] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52acb0e7-3486-7990-ab5c-ecc81906f26d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.447839] env[68233]: INFO nova.compute.manager [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Rebuilding instance [ 853.499258] env[68233]: DEBUG nova.compute.manager [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.500129] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f02348-187d-4b6b-a49a-312f15836ed7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.548216] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2c219b8c-813d-4155-af3b-327a7ebd75fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 853.583515] env[68233]: DEBUG nova.network.neutron [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Updated VIF entry in instance network info cache for port 99d9f200-1c6c-4ab7-9482-aa557fad7711. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 853.583897] env[68233]: DEBUG nova.network.neutron [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Updating instance_info_cache with network_info: [{"id": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "address": "fa:16:3e:07:51:b3", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99d9f200-1c", "ovs_interfaceid": "99d9f200-1c6c-4ab7-9482-aa557fad7711", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.592664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.592929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.593160] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.593355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.593518] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.596171] env[68233]: INFO nova.compute.manager [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Terminating instance [ 853.702607] env[68233]: DEBUG nova.compute.manager [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 853.703657] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f486e067-b863-419a-8b31-fb53ddb81bb8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.810798] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 853.849704] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52acb0e7-3486-7990-ab5c-ecc81906f26d, 'name': SearchDatastore_Task, 'duration_secs': 0.026957} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.850549] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1924fd02-ac25-4a3b-b068-e39a1627b0ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.856331] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 853.856331] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5229b262-0cec-125d-ab4c-7b81873adbd0" [ 853.856331] env[68233]: _type = "Task" [ 853.856331] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.864621] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5229b262-0cec-125d-ab4c-7b81873adbd0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.052565] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance bb59f959-4cf8-4244-b7b4-6bf630a616b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 854.086626] env[68233]: DEBUG oslo_concurrency.lockutils [req-555914d7-2d64-4d91-8c56-29b219934d99 req-9bf02698-9e73-4b1f-bd57-848873193e76 service nova] Releasing lock "refresh_cache-c5b42243-878f-4150-a5d3-63d69e474bd1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.100014] env[68233]: DEBUG nova.compute.manager [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.100256] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.101224] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e2dc82-5cd4-475d-a497-953f5a3990b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.109342] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.109644] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83874f5f-2f82-4da5-8cdf-3e1fac6cd24d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.118571] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 854.118571] env[68233]: value = "task-2782401" [ 854.118571] env[68233]: _type = "Task" [ 854.118571] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.128597] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782401, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.216328] env[68233]: INFO nova.compute.manager [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] instance snapshotting [ 854.222614] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15e04622-10ce-4f58-bac4-f4ee06454a0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.244447] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ae8bef-5569-40b6-85d3-02b1b582d7c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.370767] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5229b262-0cec-125d-ab4c-7b81873adbd0, 'name': SearchDatastore_Task, 'duration_secs': 0.042809} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.371161] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.371592] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c5b42243-878f-4150-a5d3-63d69e474bd1/c5b42243-878f-4150-a5d3-63d69e474bd1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.371882] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2da0bada-52d3-4c94-ae2a-90bd7fbc76ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.380439] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 854.380439] env[68233]: value = "task-2782402" [ 854.380439] env[68233]: _type = "Task" [ 854.380439] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.390453] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782402, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.514167] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.514575] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f762605d-8e7c-45db-bd81-a7594b688764 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.523417] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 854.523417] env[68233]: value = "task-2782403" [ 854.523417] env[68233]: _type = "Task" [ 854.523417] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.533255] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782403, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.555658] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3d759f4f-3845-4bb5-8cfa-639b7023bb27 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 854.631341] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782401, 'name': PowerOffVM_Task, 'duration_secs': 0.218215} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.631632] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 854.631807] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 854.632183] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ccc17c4-e1fd-4df3-89eb-0aa7f5043543 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.722685] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 854.723061] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 854.723210] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleting the datastore file [datastore2] f7a1bfc5-7141-4764-b3fe-08d06020209a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 854.723548] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4ea83fc-b7f4-455d-b6e0-1519a35e99cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.732970] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for the task: (returnval){ [ 854.732970] env[68233]: value = "task-2782405" [ 854.732970] env[68233]: _type = "Task" [ 854.732970] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.745217] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782405, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.757655] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 854.758021] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8b5e3b45-d94e-47b6-a721-d0e72394811d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.768288] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 854.768288] env[68233]: value = "task-2782406" [ 854.768288] env[68233]: _type = "Task" [ 854.768288] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.779221] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782406, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.890938] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782402, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487957} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.891288] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c5b42243-878f-4150-a5d3-63d69e474bd1/c5b42243-878f-4150-a5d3-63d69e474bd1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 854.891511] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 854.891770] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-600fb4f2-9ca1-49cc-b301-a393e6ebf491 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.900801] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 854.900801] env[68233]: value = "task-2782407" [ 854.900801] env[68233]: _type = "Task" [ 854.900801] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.909824] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782407, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.035025] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782403, 'name': PowerOffVM_Task, 'duration_secs': 0.492744} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.035198] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.035437] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.036253] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d60d396-19a0-4336-bfd7-c1ab6433e754 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.044533] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.044794] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1876bd9d-284f-4768-b25e-cbb3f16d128a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.058305] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 22c06baf-6316-4531-8037-b8b77c401596 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 855.156432] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.156759] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.156950] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.157281] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6287b1b4-54ef-415c-87ef-649ac2d40542 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.167019] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 855.167019] env[68233]: value = "task-2782409" [ 855.167019] env[68233]: _type = "Task" [ 855.167019] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.176777] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.243885] env[68233]: DEBUG oslo_vmware.api [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Task: {'id': task-2782405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.264698} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.244260] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.244460] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.244637] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.244808] env[68233]: INFO nova.compute.manager [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 855.245083] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.245308] env[68233]: DEBUG nova.compute.manager [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.245445] env[68233]: DEBUG nova.network.neutron [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.278956] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782406, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.411999] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782407, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.134195} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.412403] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.413290] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0e1ab2-3250-4f0f-a7ae-9d8b6d160629 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.441874] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] c5b42243-878f-4150-a5d3-63d69e474bd1/c5b42243-878f-4150-a5d3-63d69e474bd1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.443751] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d33c848-b490-430b-adf3-2882d1d8b597 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.463657] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 855.463657] env[68233]: value = "task-2782410" [ 855.463657] env[68233]: _type = "Task" [ 855.463657] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.472949] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.562459] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance d0d6eed0-db5b-4371-8f03-b3415fd833f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 855.677520] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782409, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.700941] env[68233]: DEBUG nova.compute.manager [req-0f6a30f8-b43b-469e-9b8b-cf09c0a608bf req-0a7874de-4d1b-4b26-824a-14b384014a77 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Received event network-vif-deleted-1078efb6-b35d-496e-aeb6-08489c2bfbea {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 855.701226] env[68233]: INFO nova.compute.manager [req-0f6a30f8-b43b-469e-9b8b-cf09c0a608bf req-0a7874de-4d1b-4b26-824a-14b384014a77 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Neutron deleted interface 1078efb6-b35d-496e-aeb6-08489c2bfbea; detaching it from the instance and deleting it from the info cache [ 855.701408] env[68233]: DEBUG nova.network.neutron [req-0f6a30f8-b43b-469e-9b8b-cf09c0a608bf req-0a7874de-4d1b-4b26-824a-14b384014a77 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.779088] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782406, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.974432] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782410, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.065596] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4677d047-f8dc-4501-be9b-14e6a2222f46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.177701] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.583878} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.177952] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.178159] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.178343] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.185324] env[68233]: DEBUG nova.network.neutron [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.204705] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdff2d75-0d79-4f12-ba3c-9de398fc8d78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.216486] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2aa0a3-7042-49ec-b483-971b7e6aede8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.259015] env[68233]: DEBUG nova.compute.manager [req-0f6a30f8-b43b-469e-9b8b-cf09c0a608bf req-0a7874de-4d1b-4b26-824a-14b384014a77 service nova] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Detach interface failed, port_id=1078efb6-b35d-496e-aeb6-08489c2bfbea, reason: Instance f7a1bfc5-7141-4764-b3fe-08d06020209a could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 856.279441] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782406, 'name': CreateSnapshot_Task, 'duration_secs': 1.377289} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.279751] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 856.280524] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472d0602-a099-432f-94a9-5c875a5ed004 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.474744] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782410, 'name': ReconfigVM_Task, 'duration_secs': 0.712372} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.475044] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Reconfigured VM instance instance-00000041 to attach disk [datastore2] c5b42243-878f-4150-a5d3-63d69e474bd1/c5b42243-878f-4150-a5d3-63d69e474bd1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.475697] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9bec5d1f-c042-4fd3-bf63-33ecad2d439e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.482708] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 856.482708] env[68233]: value = "task-2782411" [ 856.482708] env[68233]: _type = "Task" [ 856.482708] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.493055] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782411, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.569167] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3cca16e1-3363-4026-9359-4ed2ba41e25d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.688400] env[68233]: INFO nova.compute.manager [-] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Took 1.44 seconds to deallocate network for instance. 
[ 856.801729] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 856.802134] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e87ba75a-25fa-428e-93f2-6ab820b79583 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.813835] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 856.813835] env[68233]: value = "task-2782412" [ 856.813835] env[68233]: _type = "Task" [ 856.813835] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.822801] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782412, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.993112] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782411, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.072282] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c5c8bf0c-eb58-41bc-a316-b4ac78187658 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.193944] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.212552] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=<?>,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:47:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 857.212795] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.212950] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 857.213249] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.213439] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 857.213577] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 857.213782] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 857.213954] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 857.214151] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 857.214325] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 857.214555] env[68233]: DEBUG nova.virt.hardware [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 857.215591] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3bb567-f815-4694-bedf-b0677aac42fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.224983] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d6353d-a6b7-4305-8c5f-43d03f475a95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.239709] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:29:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bbc186d-7708-4c96-a2a7-454a8aae1e5c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 857.247302] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 857.247553] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 857.247795] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ac0152e-57b5-439c-a354-12771d7bc064 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.267844] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 857.267844] env[68233]: value = "task-2782413" [ 857.267844] env[68233]: _type = "Task" [ 857.267844] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.275933] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782413, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.325931] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782412, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.493565] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782411, 'name': Rename_Task, 'duration_secs': 0.746068} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.493896] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.494433] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f7af01d-2c1e-4bba-8df2-69d2bf395810 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.502460] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 857.502460] env[68233]: value = "task-2782414" [ 857.502460] env[68233]: _type = "Task" [ 857.502460] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.511422] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782414, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.575137] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dca145c8-ed95-4dfb-9534-37035c75dafb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.575505] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 857.575659] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4672MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 857.779126] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782413, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.824396] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782412, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.013074] env[68233]: DEBUG oslo_vmware.api [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782414, 'name': PowerOnVM_Task, 'duration_secs': 0.503962} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.015781] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.016292] env[68233]: INFO nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Took 8.48 seconds to spawn the instance on the hypervisor. 
[ 858.016292] env[68233]: DEBUG nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.017219] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bf58d7-7f6f-4cd1-84aa-78a236833768 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.066961] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fdaa99-595e-4a87-871b-3de7ee2f3092 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.075578] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54b277f-840a-4a5c-be29-99415fb20071 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.106717] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ef1f1e-4bae-42b3-a0b4-46fe9a0dcefd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.116798] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c052bb-df7b-4672-9b16-4abdb79b8c1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.132594] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.280016] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782413, 'name': CreateVM_Task, 'duration_secs': 0.519253} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.280263] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 858.281044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.281198] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.281590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 858.281778] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8e541d2-10e0-47d3-ad75-12b6e808435a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.286996] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 858.286996] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608fef-d594-26b1-3d9f-542c66393b89" [ 858.286996] env[68233]: _type = "Task" [ 858.286996] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.294568] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608fef-d594-26b1-3d9f-542c66393b89, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.323644] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782412, 'name': CloneVM_Task, 'duration_secs': 1.112305} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.323888] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Created linked-clone VM from snapshot [ 858.324603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236f8d40-cf18-4d6f-8acc-80b6f164f7ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.331848] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Uploading image 71bda9d2-4a7b-4362-a322-154a9bbf9c79 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 858.351424] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 858.351424] env[68233]: value = "vm-559396" [ 858.351424] env[68233]: _type = "VirtualMachine" [ 858.351424] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 858.351676] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-ebd5c133-5b6a-4b29-a9ce-8945d1dfc35e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.359153] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease: (returnval){ [ 858.359153] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52212b47-ce3d-2d63-d96b-f7526dcc8799" [ 858.359153] env[68233]: _type = "HttpNfcLease" [ 858.359153] env[68233]: } obtained for exporting VM: (result){ [ 858.359153] env[68233]: value = "vm-559396" [ 858.359153] env[68233]: _type = "VirtualMachine" [ 858.359153] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 858.359430] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the lease: (returnval){ [ 858.359430] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52212b47-ce3d-2d63-d96b-f7526dcc8799" [ 858.359430] env[68233]: _type = "HttpNfcLease" [ 858.359430] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 858.366374] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 858.366374] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52212b47-ce3d-2d63-d96b-f7526dcc8799" [ 858.366374] env[68233]: _type = "HttpNfcLease" [ 858.366374] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 858.537580] env[68233]: INFO nova.compute.manager [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Took 51.11 seconds to build instance. [ 858.636034] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.798791] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608fef-d594-26b1-3d9f-542c66393b89, 'name': SearchDatastore_Task, 'duration_secs': 0.035361} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.799151] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.799407] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 858.799647] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.799793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.799971] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 858.800252] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ba3a5f7-78a3-4105-b3b7-4c98969df67b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.815042] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 858.815176] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 858.815908] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a7eedad-94fe-46ec-bfdc-2935a0215cdf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.821891] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 858.821891] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5284c922-41f2-4d94-951b-e32df1b37d65" [ 858.821891] env[68233]: _type = "Task" [ 858.821891] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.830061] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5284c922-41f2-4d94-951b-e32df1b37d65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.868776] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 858.868776] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52212b47-ce3d-2d63-d96b-f7526dcc8799" [ 858.868776] env[68233]: _type = "HttpNfcLease" [ 858.868776] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 858.868907] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 858.868907] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52212b47-ce3d-2d63-d96b-f7526dcc8799" [ 858.868907] env[68233]: _type = "HttpNfcLease" [ 858.868907] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 858.869714] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f159ac30-4d97-4f6c-9b3f-57cb942e9a7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.879249] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 858.879391] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.036210] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5c31a526-b6f0-4df4-885b-e175422b6459 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.039292] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a352239-081d-4e6e-87ae-587f10f63eda tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.422s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.140535] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 859.140935] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.149s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.141339] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.385s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.143452] env[68233]: INFO nova.compute.claims [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.147721] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None 
None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 859.147721] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 859.337158] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5284c922-41f2-4d94-951b-e32df1b37d65, 'name': SearchDatastore_Task, 'duration_secs': 0.039713} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.338415] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d831a52-5172-41fc-89d7-927510345d60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.346520] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 859.346520] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527949e3-871b-abf2-4139-fd60b529a9ff" [ 859.346520] env[68233]: _type = "Task" [ 859.346520] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.358789] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527949e3-871b-abf2-4139-fd60b529a9ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.662689] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] There are 37 instances to clean {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 859.663120] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: b5e9ef73-2203-42b4-bee0-76d439ffaa17] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 859.858719] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527949e3-871b-abf2-4139-fd60b529a9ff, 'name': SearchDatastore_Task, 'duration_secs': 0.011632} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.859898] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.859898] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 859.859898] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deb4b53f-448f-43c5-87c8-486297c59e2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.868962] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 859.868962] env[68233]: value = "task-2782416" [ 859.868962] env[68233]: _type = "Task" [ 859.868962] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.880072] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.955504] env[68233]: DEBUG nova.compute.manager [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 859.956411] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86c3aa7-008a-4cc6-91d0-8e3092366904 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.169842] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 65f9fe09-97dc-4988-bae4-243d60e33be9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 860.383567] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50331} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.386758] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 860.387087] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 860.387628] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5044025-ba8a-4fa2-b214-8c569173b30e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.395145] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 860.395145] env[68233]: value = "task-2782417" [ 860.395145] env[68233]: _type = "Task" [ 860.395145] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.407394] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782417, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.473053] env[68233]: INFO nova.compute.manager [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] instance snapshotting [ 860.476810] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50798819-222b-4c59-8369-686b91a93850 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.505856] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d53623-a780-4ee3-9070-1c9439594fa9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.672763] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 09e4644d-d845-47f4-8748-925f739863b9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 860.691862] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c21abd-eee2-470a-a619-cfefdaf888fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.701736] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac23905-0916-4980-b0fb-a5d1925fceb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.735535] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383d3f60-7712-4b61-8c62-9a788b7c737f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.744764] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c813ad32-1b4c-4cb3-a98e-5f66bbeb043b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.760379] env[68233]: DEBUG nova.compute.provider_tree [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.905770] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782417, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072289} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.906150] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.907092] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c853e8f8-21e4-4ee7-87ae-f6779b4da77b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.933174] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.933718] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29d61dde-a150-480d-aeab-124c4b04a602 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.956554] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 860.956554] env[68233]: value = "task-2782418" [ 860.956554] env[68233]: _type = "Task" [ 860.956554] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.966200] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782418, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.024628] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 861.025601] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-edde1c3f-3645-4e65-8535-bcf1e7c5276c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.035222] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 861.035222] env[68233]: value = "task-2782419" [ 861.035222] env[68233]: _type = "Task" [ 861.035222] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.045272] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782419, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.176432] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 175ced9c-52f6-4577-a010-8fffc2876e6a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 861.264357] env[68233]: DEBUG nova.scheduler.client.report [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 861.467991] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782418, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.547920] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782419, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.680039] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 135c2d22-26ac-41a4-a860-accc12dd4c9a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 861.769574] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.770719] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 861.773763] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.299s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.776438] env[68233]: INFO nova.compute.claims [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 861.971689] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782418, 'name': ReconfigVM_Task, 'duration_secs': 0.638646} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.971689] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 861.972281] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c3684df-e5a7-408a-8a2b-81a33f2f6e0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.981820] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 861.981820] env[68233]: value = "task-2782420" [ 861.981820] env[68233]: _type = "Task" [ 861.981820] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.994710] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782420, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.048024] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782419, 'name': CreateSnapshot_Task, 'duration_secs': 1.012701} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.048024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 862.048685] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3239e08-9a1d-48df-9140-3e8d96a18544 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.184338] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: dcd8cca2-b62c-44a6-9e77-f336d2d39c09] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 862.284223] env[68233]: DEBUG nova.compute.utils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 862.285611] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 862.286268] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.345225] env[68233]: DEBUG nova.policy [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1530f9013bf3447681ec08c2b1f42450', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9134c1a2d50427da2dfadce2cd08a93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 862.493221] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782420, 'name': Rename_Task, 'duration_secs': 0.219725} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.493538] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 862.493825] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fea582b-112b-4a79-b547-4b0cb58371ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.501997] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 862.501997] env[68233]: value = "task-2782421" [ 862.501997] env[68233]: _type = "Task" [ 862.501997] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.513639] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782421, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.568865] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 862.569237] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2445eb73-5eda-4814-a09b-0c628ab47466 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.579449] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 862.579449] env[68233]: value = "task-2782422" [ 862.579449] env[68233]: _type = "Task" [ 862.579449] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.588289] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.689221] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c8fd5539-8add-45fe-a0ac-8767bf8a330e] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 862.743716] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Successfully created port: d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 862.792139] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.016361] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782421, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.097401] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.192693] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 2a88648c-f00d-4d7b-905d-e70c327e248a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 863.337844] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-124948f0-36f4-48ee-870e-daec06cdcf28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.346530] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fa9208-b73c-4bbe-ae80-a2b313b4e99a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.379829] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4e88e3-067c-4c01-91e8-656ba59edfa9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.388997] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f819c7a-d58b-43c6-96ea-bc8700a29f19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.405867] env[68233]: DEBUG nova.compute.provider_tree [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Inventory has not changed in ProviderTree for provider: 
51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.513667] env[68233]: DEBUG oslo_vmware.api [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782421, 'name': PowerOnVM_Task, 'duration_secs': 0.616147} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.513971] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 863.514198] env[68233]: DEBUG nova.compute.manager [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 863.514962] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bece861f-dfa1-45d5-9511-920a3d4a3829 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.591257] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.696675] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 6c34d7ce-7bf1-4f88-812f-adc1eb5353dd] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 863.802079] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 863.823204] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 863.823468] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 863.823625] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 863.823806] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 863.823951] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 863.824146] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 863.824384] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 863.824554] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 863.824723] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 863.824884] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 863.825065] env[68233]: DEBUG nova.virt.hardware [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 863.825922] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3651a6-3598-49d9-b8be-2bae4c70fd7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.835147] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e034f50f-6821-4d49-8f9b-d93b110b6015 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.911073] env[68233]: DEBUG nova.scheduler.client.report [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.031092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.092980] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.200228] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 86528c8b-b51e-480d-a7bf-013d990d51ca] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 864.416026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.642s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.416638] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 864.423029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.880s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.423029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.423029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.303s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.423943] env[68233]: INFO nova.compute.claims [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.460173] env[68233]: INFO nova.scheduler.client.report [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Deleted allocations for instance 28af332b-4f9b-4474-afdc-ab17e92df6e7 [ 864.593242] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.703396] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: a5468df9-c54d-4014-8002-ef82f111a7a4] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 864.781154] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Successfully updated port: d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.847020] env[68233]: DEBUG nova.compute.manager [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Received event network-vif-plugged-d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 864.847020] env[68233]: DEBUG oslo_concurrency.lockutils [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] Acquiring lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.847020] env[68233]: DEBUG oslo_concurrency.lockutils [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.847020] env[68233]: DEBUG oslo_concurrency.lockutils [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.847020] env[68233]: DEBUG nova.compute.manager [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] No waiting events found dispatching network-vif-plugged-d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 864.847757] env[68233]: WARNING nova.compute.manager [req-f66ae5c4-4517-4dc0-874f-328daaf8bf95 req-c0a5d8a0-2c50-4996-a81c-474bb412979e service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Received unexpected event network-vif-plugged-d5ff7193-47a7-4f21-8f05-413e7d813756 for instance with vm_state building and task_state spawning. 
[ 864.935880] env[68233]: DEBUG nova.compute.utils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 864.938149] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 864.938292] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 864.969532] env[68233]: DEBUG oslo_concurrency.lockutils [None req-87370f30-d748-4ef8-91f2-711e3c05a1ff tempest-ServerAddressesTestJSON-2007014818 tempest-ServerAddressesTestJSON-2007014818-project-member] Lock "28af332b-4f9b-4474-afdc-ab17e92df6e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.827s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.014801] env[68233]: DEBUG nova.policy [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cb03ef6645a5482bb3291ab4f64694e0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ef8413c75d144f8838e651ac023fa1a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 865.095173] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782422, 'name': CloneVM_Task, 'duration_secs': 2.021443} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.095724] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Created linked-clone VM from snapshot [ 865.096532] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c07f771-b680-4e44-a1b5-7b4b36ef651a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.105776] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Uploading image fa91d76b-2020-4abf-8837-92f1504eee4f {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 865.133252] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 865.133252] env[68233]: value = "vm-559399" [ 865.133252] env[68233]: _type = "VirtualMachine" [ 865.133252] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 865.133562] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2f148f4a-1178-4afa-b13c-3b61d89159d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.143112] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease: (returnval){ [ 865.143112] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526327f1-f33f-e4c2-d054-8f2960c1afa1" [ 865.143112] env[68233]: _type = "HttpNfcLease" [ 865.143112] env[68233]: } obtained for exporting VM: (result){ [ 865.143112] env[68233]: value = "vm-559399" [ 865.143112] env[68233]: _type = "VirtualMachine" [ 865.143112] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 865.143629] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the lease: (returnval){ [ 865.143629] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526327f1-f33f-e4c2-d054-8f2960c1afa1" [ 865.143629] env[68233]: _type = "HttpNfcLease" [ 865.143629] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 865.151305] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 865.151305] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526327f1-f33f-e4c2-d054-8f2960c1afa1" [ 865.151305] env[68233]: _type = "HttpNfcLease" [ 865.151305] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 865.207208] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9203ac1f-3d3f-4d1b-959b-5f15b09fd5bb] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 865.286897] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.286897] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquired lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.286897] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.444304] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 865.447457] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Successfully created port: e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 865.654901] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 865.654901] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526327f1-f33f-e4c2-d054-8f2960c1afa1" [ 865.654901] env[68233]: _type = "HttpNfcLease" [ 865.654901] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 865.655456] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 865.655456] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526327f1-f33f-e4c2-d054-8f2960c1afa1" [ 865.655456] env[68233]: _type = "HttpNfcLease" [ 865.655456] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 865.656021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33614a27-074b-452d-8847-e028d05e6af6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.666985] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 865.667247] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 865.734388] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 5ed44950-8e9b-4f42-9611-d5bff01dc905] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 865.764300] env[68233]: INFO nova.compute.manager [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Rebuilding instance [ 865.792094] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2e08e885-8f94-491b-84ce-5b6c6dd73d04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.821446] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.833725] env[68233]: DEBUG nova.compute.manager [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 865.834790] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176b5cea-70b2-4753-9bf2-2ea7f77fb820 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.074532] env[68233]: DEBUG nova.network.neutron [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Updating instance_info_cache with network_info: [{"id": "d5ff7193-47a7-4f21-8f05-413e7d813756", "address": "fa:16:3e:64:b3:f6", "network": {"id": "7e857221-41df-41f9-af29-eb053c6d6258", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-254899092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9134c1a2d50427da2dfadce2cd08a93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ff7193-47", "ovs_interfaceid": "d5ff7193-47a7-4f21-8f05-413e7d813756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.182380] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171b260a-979b-4b7c-8147-50bd254bdcbd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.190678] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7aa57b-82db-4687-afff-4b7e138936c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.229374] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e284417c-8c56-4687-9db7-0f74e346690e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.239719] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51891dc5-2ee9-4ffc-8969-785d26296ae7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.244599] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: b056fbf4-4873-4ec9-905a-ad973c8fb27a] Instance has had 0 of 5 
cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 866.261832] env[68233]: DEBUG nova.compute.provider_tree [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.471079] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 866.582446] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Releasing lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.582446] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Instance network_info: |[{"id": "d5ff7193-47a7-4f21-8f05-413e7d813756", "address": "fa:16:3e:64:b3:f6", "network": {"id": "7e857221-41df-41f9-af29-eb053c6d6258", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-254899092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9134c1a2d50427da2dfadce2cd08a93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ff7193-47", "ovs_interfaceid": "d5ff7193-47a7-4f21-8f05-413e7d813756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 866.584036] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:b3:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd5ff7193-47a7-4f21-8f05-413e7d813756', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 866.592363] env[68233]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Creating folder: Project (c9134c1a2d50427da2dfadce2cd08a93). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.592674] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58871109-87a5-44ab-9ad8-bf142a7b3539 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.613603] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Created folder: Project (c9134c1a2d50427da2dfadce2cd08a93) in parent group-v559223. [ 866.614650] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Creating folder: Instances. Parent ref: group-v559400. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 866.616173] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f13516e4-990d-4cea-ad96-613b900a97d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.627942] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Created folder: Instances in parent group-v559400. [ 866.628298] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.628583] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 866.628835] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-334cd737-f029-4d15-beac-708c7fe31001 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.653107] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 866.653107] env[68233]: value = "task-2782426" [ 866.653107] env[68233]: _type = "Task" [ 866.653107] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.662394] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782426, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.748612] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 080ab438-269b-427a-9ee9-71c59d9c2a91] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 866.767539] env[68233]: DEBUG nova.scheduler.client.report [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.865598] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.865598] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5227ce63-ee4c-4f77-b5c5-0ed718044af9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.873955] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 866.873955] env[68233]: value = "task-2782427" [ 866.873955] env[68233]: _type = "Task" [ 866.873955] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.887957] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782427, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.928995] env[68233]: DEBUG nova.compute.manager [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Received event network-changed-d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 866.932174] env[68233]: DEBUG nova.compute.manager [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Refreshing instance network info cache due to event network-changed-d5ff7193-47a7-4f21-8f05-413e7d813756. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 866.932174] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] Acquiring lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.932174] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] Acquired lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.932174] env[68233]: DEBUG nova.network.neutron [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Refreshing network info cache for port d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 867.165028] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782426, 'name': CreateVM_Task, 'duration_secs': 0.444612} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.165540] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 867.166413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.166690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.167368] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 867.167508] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f80e50e-9dde-4bbe-b69f-0fdb8caf062f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.174042] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 867.174042] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52eeeba5-d9c4-7368-fde5-4d6a51d1105d" [ 867.174042] env[68233]: _type = "Task" [ 867.174042] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.184259] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52eeeba5-d9c4-7368-fde5-4d6a51d1105d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.198504] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Successfully updated port: e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 867.255289] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3c9b701e-6461-45e3-8654-3291c5a487b9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 867.273926] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.851s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.274570] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 867.277278] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.071s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.277544] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.280538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.887s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.280825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.282779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.105s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.284414] env[68233]: INFO nova.compute.claims [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.321348] env[68233]: INFO nova.scheduler.client.report [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted allocations for instance 07c7d125-d689-4499-aa4a-b9d3441c6fd0 [ 867.327152] env[68233]: INFO nova.scheduler.client.report [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted allocations for instance 64b8997c-3246-4c97-a6c9-3a6a23645d38 [ 867.385611] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782427, 'name': PowerOffVM_Task, 'duration_secs': 0.26303} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.385908] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 867.386140] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 867.386922] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4ed43e-b9b6-43d7-9238-24d13c5e568c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.395216] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.395491] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a42ae524-3d0a-4da1-81f1-67d7133e0e46 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.482606] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 867.482896] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 867.483057] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.483293] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e04e53e7-5e06-4536-9dc5-7dedb086fac4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.491185] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 867.491185] env[68233]: value = "task-2782429" [ 867.491185] env[68233]: _type = "Task" [ 867.491185] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.502841] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782429, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.690278] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52eeeba5-d9c4-7368-fde5-4d6a51d1105d, 'name': SearchDatastore_Task, 'duration_secs': 0.014664} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.690486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.690799] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 867.691092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.691314] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.691574] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 867.691883] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7b3405e-5317-4b75-be1e-cf9a5b697b57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.704216] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock 
"refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.704529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquired lock "refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 867.704628] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 867.705814] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 867.705993] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 867.707618] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e47e94-dda8-41e0-9eea-b09def52dbdd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.718130] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 867.718130] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b1513f-c51a-fe3c-9cfe-518ce718e8c0" [ 867.718130] env[68233]: _type = "Task" [ 867.718130] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.729766] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b1513f-c51a-fe3c-9cfe-518ce718e8c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.758848] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 636b6b36-3ab5-4851-a232-d27b54895595] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 867.788074] env[68233]: DEBUG nova.network.neutron [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Updated VIF entry in instance network info cache for port d5ff7193-47a7-4f21-8f05-413e7d813756. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 867.788485] env[68233]: DEBUG nova.network.neutron [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Updating instance_info_cache with network_info: [{"id": "d5ff7193-47a7-4f21-8f05-413e7d813756", "address": "fa:16:3e:64:b3:f6", "network": {"id": "7e857221-41df-41f9-af29-eb053c6d6258", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-254899092-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9134c1a2d50427da2dfadce2cd08a93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd5ff7193-47", "ovs_interfaceid": "d5ff7193-47a7-4f21-8f05-413e7d813756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.790798] env[68233]: DEBUG nova.compute.utils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 867.794810] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 867.794965] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.835069] env[68233]: DEBUG nova.policy [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9d455afa7a543a38503076f5d15213a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed5b204b46f445e4a12938af770ecbb5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 867.839479] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a509bf9-e6b7-4668-8ed6-80cb4feb56bd tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "64b8997c-3246-4c97-a6c9-3a6a23645d38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.314s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.840478] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd66e2b3-f0ca-42f8-b84b-b5f4ce963d3a tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "07c7d125-d689-4499-aa4a-b9d3441c6fd0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.180s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.935619] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 867.935867] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 867.936035] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 867.936491] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.936491] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 867.936491] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 867.936709] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 867.936911] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 867.938176] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 867.938453] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 867.938633] env[68233]: DEBUG nova.virt.hardware [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 867.940977] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae61b00-e1b7-4f37-88d7-c3a6c3a43b98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.952183] env[68233]: DEBUG 
oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 867.953635] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c7dfec-0c30-4727-96d4-dfeabde64554 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.958270] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f92039-1ce9-43aa-910e-35cb70bb745f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.965465] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 867.965618] env[68233]: ERROR oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk due to incomplete transfer. [ 867.974373] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-31186778-3b2d-4da4-9955-cc8fe175c978 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.984143] env[68233]: DEBUG oslo_vmware.rw_handles [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52029f29-88d6-ee34-8fd6-1490eb28d8be/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 867.984376] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Uploaded image 71bda9d2-4a7b-4362-a322-154a9bbf9c79 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 867.986691] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 867.987246] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-dba90747-3b64-4045-bf3e-a198be65c3c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.996782] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 867.996782] env[68233]: value = "task-2782430" [ 867.996782] env[68233]: _type = "Task" [ 867.996782] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.004403] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25934} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.005233] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 868.005454] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 868.005650] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 868.011672] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782430, 'name': Destroy_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.233654] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b1513f-c51a-fe3c-9cfe-518ce718e8c0, 'name': SearchDatastore_Task, 'duration_secs': 0.014545} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.233654] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae9cb143-44b6-4b3d-887e-a4972754d933 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.242304] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 868.242304] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52579840-f2fc-dfc8-695d-fd4299e3e487" [ 868.242304] env[68233]: _type = "Task" [ 868.242304] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.250812] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52579840-f2fc-dfc8-695d-fd4299e3e487, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.264775] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 88d67405-b8c6-484a-b178-68a8babb3708] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 868.293192] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 868.295353] env[68233]: DEBUG oslo_concurrency.lockutils [req-ccd6ceeb-7fd0-4877-8848-9777d13e21ca req-ea4e7b8b-525e-488d-8545-b97c525d53b4 service nova] Releasing lock "refresh_cache-2c219b8c-813d-4155-af3b-327a7ebd75fc" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.296146] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 868.348506] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Successfully created port: 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.516980] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782430, 'name': Destroy_Task, 'duration_secs': 0.385872} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.516980] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Destroyed the VM [ 868.517175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 868.517360] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7fe2be31-1479-43ba-a380-52d096f2d834 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.530940] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 868.530940] env[68233]: value = "task-2782431" [ 868.530940] env[68233]: _type = "Task" [ 868.530940] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.544858] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782431, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.730559] env[68233]: DEBUG nova.network.neutron [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Updating instance_info_cache with network_info: [{"id": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "address": "fa:16:3e:2d:6e:8e", "network": {"id": "8f0ee2f0-ed37-4dbd-be05-edd625d0179d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1460809670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef8413c75d144f8838e651ac023fa1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3cadc37-6c", "ovs_interfaceid": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.756364] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52579840-f2fc-dfc8-695d-fd4299e3e487, 'name': SearchDatastore_Task, 'duration_secs': 0.011741} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.762020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.762020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2c219b8c-813d-4155-af3b-327a7ebd75fc/2c219b8c-813d-4155-af3b-327a7ebd75fc.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 868.762020] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3cb481d-43cb-457b-9241-3df340da6c29 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.770711] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 0f7d80d2-5c34-42f7-a14a-97f9625675a8] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 868.772559] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 868.772559] env[68233]: value = "task-2782432" [ 868.772559] env[68233]: _type = "Task" [ 868.772559] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.782389] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782432, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.840082] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e6dbc4d-1348-4c7c-8e6f-9300fa48b178 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.853434] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffca09d-313a-487d-9c88-ec2dc3124c22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.890846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9444f5a7-4450-4efc-9c2f-c58745a9793c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.900437] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fce4796-d917-41fd-aaf3-53e81c12cc65 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.920426] env[68233]: DEBUG nova.compute.provider_tree [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.013140] env[68233]: DEBUG nova.compute.manager [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Received event network-vif-plugged-e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.013814] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Acquiring lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.014099] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.014307] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.014487] env[68233]: DEBUG nova.compute.manager [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] No waiting events found dispatching network-vif-plugged-e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.014656] 
env[68233]: WARNING nova.compute.manager [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Received unexpected event network-vif-plugged-e3cadc37-6c3d-4281-982a-738d06c7f97a for instance with vm_state building and task_state spawning. [ 869.014820] env[68233]: DEBUG nova.compute.manager [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Received event network-changed-e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 869.014981] env[68233]: DEBUG nova.compute.manager [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Refreshing instance network info cache due to event network-changed-e3cadc37-6c3d-4281-982a-738d06c7f97a. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 869.015156] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Acquiring lock "refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.043652] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782431, 'name': RemoveSnapshot_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.050517] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 869.050774] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.050854] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 869.051079] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 
tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.051252] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.051405] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 869.051619] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 869.051786] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 869.051958] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 869.052169] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 869.052380] env[68233]: DEBUG nova.virt.hardware [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 869.053629] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db123079-ea69-4c41-a855-19f6f1f3b69d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.063424] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab04fc0-289e-408e-a1d4-704831239147 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.080770] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:29:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'1316f5aa-529f-4bac-8dd7-6076a9d43312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5bbc186d-7708-4c96-a2a7-454a8aae1e5c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.089238] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.090254] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.091218] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-59697d03-d707-41c5-bdb2-dc55aeefbb8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.115385] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.115385] env[68233]: value = "task-2782433" [ 869.115385] env[68233]: _type = "Task" [ 869.115385] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.128053] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782433, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.233185] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Releasing lock "refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.233568] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Instance network_info: |[{"id": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "address": "fa:16:3e:2d:6e:8e", "network": {"id": "8f0ee2f0-ed37-4dbd-be05-edd625d0179d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1460809670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef8413c75d144f8838e651ac023fa1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3cadc37-6c", "ovs_interfaceid": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 869.233893] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Acquired lock "refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.234090] env[68233]: DEBUG nova.network.neutron [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Refreshing network info cache for port e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 869.235449] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:6e:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b94712a6-b777-47dd-bc06-f9acfce2d936', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e3cadc37-6c3d-4281-982a-738d06c7f97a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.242976] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Creating folder: Project (5ef8413c75d144f8838e651ac023fa1a). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.246554] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef59617c-28b6-4e6a-8979-44f63891ec3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.261556] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Created folder: Project (5ef8413c75d144f8838e651ac023fa1a) in parent group-v559223. [ 869.261874] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Creating folder: Instances. Parent ref: group-v559404. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.262294] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2971cefc-183a-42e2-b502-da9dcb754ba7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.275318] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 6ae76b0f-7df2-4652-b4c3-92c16ed487a1] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 869.281081] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Created folder: Instances in parent group-v559404. [ 869.281081] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.281437] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.281958] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f624630-2728-41e0-bd13-c560f1698ba1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.304602] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523652} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.304909] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 2c219b8c-813d-4155-af3b-327a7ebd75fc/2c219b8c-813d-4155-af3b-327a7ebd75fc.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 869.305152] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 869.305402] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.305402] env[68233]: value = "task-2782436" [ 869.305402] env[68233]: _type = "Task" [ 869.305402] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.306174] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a806fb23-c693-4fea-a050-24ed0f24210b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.309231] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 869.324359] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782436, 'name': CreateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.325854] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 869.325854] env[68233]: value = "task-2782437" [ 869.325854] env[68233]: _type = "Task" [ 869.325854] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.338706] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782437, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.344843] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 869.345110] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.345266] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 869.345452] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.345594] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.345738] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 869.345937] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 869.346112] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 869.346301] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 869.346630] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 869.346749] env[68233]: DEBUG nova.virt.hardware [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 869.347615] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25436eb1-cc2d-43c3-ac15-5aac1ee0e3df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.357669] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a76959-1351-4a52-bff4-56f043334246 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.424103] env[68233]: DEBUG nova.scheduler.client.report [None req-d96e7e5f-a404-4069-810c-05331ea6e400 
tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.547554] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782431, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.622825] env[68233]: DEBUG nova.network.neutron [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Updated VIF entry in instance network info cache for port e3cadc37-6c3d-4281-982a-738d06c7f97a. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 869.623853] env[68233]: DEBUG nova.network.neutron [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Updating instance_info_cache with network_info: [{"id": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "address": "fa:16:3e:2d:6e:8e", "network": {"id": "8f0ee2f0-ed37-4dbd-be05-edd625d0179d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1460809670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ef8413c75d144f8838e651ac023fa1a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b94712a6-b777-47dd-bc06-f9acfce2d936", "external-id": "nsx-vlan-transportzone-494", "segmentation_id": 494, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape3cadc37-6c", "ovs_interfaceid": "e3cadc37-6c3d-4281-982a-738d06c7f97a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.632397] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782433, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.780076] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 769956c6-7824-41db-9779-fc1b5f53dd94] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 869.823455] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782436, 'name': CreateVM_Task, 'duration_secs': 0.460825} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.823771] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 869.824773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.825065] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.825527] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 869.825952] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b99ba7c6-e326-4517-9a72-b24addf4b50a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.838221] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 869.838221] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a8e24e-1639-2213-aa80-539f7ba2ee6a" [ 869.838221] env[68233]: _type = "Task" [ 869.838221] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.843197] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782437, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091657} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.848374] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 869.849623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb0bb6e-00f2-43d4-8156-207459e7e8e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.863092] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a8e24e-1639-2213-aa80-539f7ba2ee6a, 'name': SearchDatastore_Task, 'duration_secs': 0.01187} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.879531] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.879901] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.880407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.880671] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.880969] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.897097] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 
tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 2c219b8c-813d-4155-af3b-327a7ebd75fc/2c219b8c-813d-4155-af3b-327a7ebd75fc.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 869.897554] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eac4fd4-8762-4519-872f-50c7c83fdbd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.901476] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95e2860b-24b7-4069-bfeb-98b02cb4a8c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.931526] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.932384] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 869.936175] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 34.616s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.939611] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.939809] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.941446] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e76ac94-ed11-4225-887b-d32ba6352162 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.943983] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 869.943983] env[68233]: value = "task-2782438" [ 869.943983] env[68233]: _type = "Task" [ 869.943983] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.949079] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 869.949079] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa9cf8-4e4f-2548-8998-9da6355893b8" [ 869.949079] env[68233]: _type = "Task" [ 869.949079] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.957056] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782438, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.963433] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa9cf8-4e4f-2548-8998-9da6355893b8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.047975] env[68233]: DEBUG oslo_vmware.api [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782431, 'name': RemoveSnapshot_Task, 'duration_secs': 1.078191} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.048210] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 870.048443] env[68233]: INFO nova.compute.manager [None req-1372ff62-1368-49a4-a994-bc60c729e8c8 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 15.83 seconds to snapshot the instance on the hypervisor. [ 870.130827] env[68233]: DEBUG oslo_concurrency.lockutils [req-d4bf6ee1-c261-4a74-9366-b56c30c5d5b4 req-4f917c83-f7ed-476f-b68a-0ee1f7481945 service nova] Releasing lock "refresh_cache-bb59f959-4cf8-4244-b7b4-6bf630a616b3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.131667] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782433, 'name': CreateVM_Task, 'duration_secs': 0.561515} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.133055] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 870.133393] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.133730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.134210] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 870.134609] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a05586c-4338-4229-bba8-6d01fb063325 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.143289] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 870.143289] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608642-586e-2d24-8c1b-940aeef5b856" [ 870.143289] env[68233]: _type = "Task" [ 870.143289] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.153543] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608642-586e-2d24-8c1b-940aeef5b856, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.179032] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "0f813d55-2737-44ae-b62d-3321e77dfdab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.179572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.220305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "903f0919-b321-4d74-9ea2-bc9771184ded" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.221512] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.283207] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 75f58a50-7891-42df-8820-c997300a3159] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 870.334674] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Successfully updated port: 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 870.438435] env[68233]: DEBUG nova.objects.instance [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lazy-loading 'migration_context' on Instance uuid 72467d49-6fa8-42db-871e-4e50e77eedf7 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.440852] env[68233]: DEBUG nova.compute.utils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.443536] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 
tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 870.443749] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 870.462640] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa9cf8-4e4f-2548-8998-9da6355893b8, 'name': SearchDatastore_Task, 'duration_secs': 0.0166} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.466577] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782438, 'name': ReconfigVM_Task, 'duration_secs': 0.343629} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.467090] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-199252da-e6b3-43d1-9803-0e2fc8a941d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.469504] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 2c219b8c-813d-4155-af3b-327a7ebd75fc/2c219b8c-813d-4155-af3b-327a7ebd75fc.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 870.470247] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c3ca281f-af0b-4b04-a3a2-e7bfdb70197e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.475953] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 870.475953] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bdefb2-f921-c809-1a3f-0334baaab0e7" [ 870.475953] env[68233]: _type = "Task" [ 870.475953] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.477360] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 870.477360] env[68233]: value = "task-2782439" [ 870.477360] env[68233]: _type = "Task" [ 870.477360] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.491351] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bdefb2-f921-c809-1a3f-0334baaab0e7, 'name': SearchDatastore_Task, 'duration_secs': 0.013209} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.494812] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.495129] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] bb59f959-4cf8-4244-b7b4-6bf630a616b3/bb59f959-4cf8-4244-b7b4-6bf630a616b3.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.495775] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782439, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.496013] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1127b01-1bb8-46d3-9a91-40ec4324b867 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.504883] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 870.504883] env[68233]: value = "task-2782440" [ 870.504883] env[68233]: _type = "Task" [ 870.504883] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.514796] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782440, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.516482] env[68233]: DEBUG nova.policy [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '434654c75b9b4ddaaf3714b355c2a5bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd921fe0876de499dbc86529a00b2c6f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 870.657026] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52608642-586e-2d24-8c1b-940aeef5b856, 'name': SearchDatastore_Task, 'duration_secs': 0.012554} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.657026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 870.657026] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 870.657026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.657026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.657026] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 870.657026] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e639704e-23e3-41bd-a148-19ba9d3f685a {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.666194] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 870.666433] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 870.667677] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a94faf8-8c50-411f-9121-11f8b5b58fc2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.677568] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 870.677568] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5288da49-6a0b-75a8-dc2c-7d1755155e1d" [ 870.677568] env[68233]: _type = "Task" [ 870.677568] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.686384] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.689520] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5288da49-6a0b-75a8-dc2c-7d1755155e1d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.787586] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: ba4ad2f8-fad1-45be-b2b1-68c3a58f3750] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 870.836793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.836986] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.837128] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.944431] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 870.999946] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782439, 'name': Rename_Task, 'duration_secs': 0.165169} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.999946] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.999946] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bb98f1c-a9d2-46be-a850-111eab0f4a10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.013589] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 871.013589] env[68233]: value = "task-2782441" [ 871.013589] env[68233]: _type = "Task" [ 871.013589] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.026098] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782440, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.032597] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782441, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.074321] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Successfully created port: 85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.154155] env[68233]: DEBUG nova.compute.manager [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-vif-plugged-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 871.154155] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Acquiring lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.154155] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.154439] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.154542] env[68233]: DEBUG nova.compute.manager [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] No waiting events found dispatching network-vif-plugged-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.154729] env[68233]: WARNING nova.compute.manager [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received unexpected event network-vif-plugged-5d587ebc-2b71-4893-96e5-f636d9a634a4 for instance with vm_state building and task_state 
spawning. [ 871.154911] env[68233]: DEBUG nova.compute.manager [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 871.155088] env[68233]: DEBUG nova.compute.manager [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing instance network info cache due to event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 871.155264] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.192050] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5288da49-6a0b-75a8-dc2c-7d1755155e1d, 'name': SearchDatastore_Task, 'duration_secs': 0.011759} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.193311] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74e4dbf2-05d1-4dbe-b736-25d6621fc7c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.206238] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 871.206238] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6f922-d8f9-cbde-3e19-8eabf7cda478" [ 871.206238] env[68233]: _type = "Task" [ 871.206238] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.213124] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.217476] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6f922-d8f9-cbde-3e19-8eabf7cda478, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.290921] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: d1577f70-4fb6-4b0b-9d41-8d245c26c90c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 871.399189] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 871.529024] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782440, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52035} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.529024] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] bb59f959-4cf8-4244-b7b4-6bf630a616b3/bb59f959-4cf8-4244-b7b4-6bf630a616b3.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.529024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.529024] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78da943b-82e8-4139-bb90-f807fd9742a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.537204] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782441, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.543471] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 871.543471] env[68233]: value = "task-2782442" [ 871.543471] env[68233]: _type = "Task" [ 871.543471] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.554063] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782442, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.574759] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d44330-75f5-4185-acee-d44847f2dd20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.586533] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daf35479-9846-4e12-97a0-bc7ffb5e846f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.624635] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed3a7bd-0598-4b12-8cb0-a33b11114656 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.633573] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5da587-d43f-4ff7-a693-0a5d296525fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.649085] env[68233]: DEBUG nova.compute.provider_tree [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.711334] env[68233]: DEBUG nova.network.neutron [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.719793] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6f922-d8f9-cbde-3e19-8eabf7cda478, 'name': SearchDatastore_Task, 'duration_secs': 0.012536} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.722350] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.722350] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 871.722350] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a464326-434a-4e35-86d1-6b26273b8ddd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.729324] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 871.729324] env[68233]: value = "task-2782443" [ 871.729324] env[68233]: _type = "Task" [ 871.729324] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.738916] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.794354] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 90d88fcb-6141-499c-b049-ddfc9e210d5c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 871.957265] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 871.984139] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 871.984548] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.984715] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 871.984913] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.985072] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 871.985229] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 871.985448] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 871.985853] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 871.985853] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 871.985991] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 871.986118] env[68233]: DEBUG nova.virt.hardware [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 871.987419] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e62506-bcf6-4408-853e-f25e3ff251bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.998258] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18b0825-48db-400c-938c-989f95fd0116 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.028531] env[68233]: DEBUG oslo_vmware.api [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782441, 'name': PowerOnVM_Task, 'duration_secs': 0.531299} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.028819] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 872.029046] env[68233]: INFO nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Took 8.23 seconds to spawn the instance on the hypervisor. 
[ 872.029277] env[68233]: DEBUG nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.030137] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0539a723-53f9-4f24-a9ab-924f0a532530 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.055190] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101321} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.055600] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 872.056408] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d434f869-3d7d-4956-b43e-92f1b658f3a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.084071] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] bb59f959-4cf8-4244-b7b4-6bf630a616b3/bb59f959-4cf8-4244-b7b4-6bf630a616b3.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 872.085319] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48a16247-e0b4-4eff-886f-e8280aa9876b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.112301] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 872.112301] env[68233]: value = "task-2782444" [ 872.112301] env[68233]: _type = "Task" [ 872.112301] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.123913] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782444, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.153329] env[68233]: DEBUG nova.scheduler.client.report [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.214551] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.214754] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Instance network_info: |[{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 872.215169] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.215358] env[68233]: DEBUG nova.network.neutron [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.217216] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:dc:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5ba07329-1d3e-4ba8-8774-d029262318c4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d587ebc-2b71-4893-96e5-f636d9a634a4', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.226131] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Creating folder: Project (ed5b204b46f445e4a12938af770ecbb5). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.227496] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12568c52-988d-4a59-8b19-67cb308420b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.241130] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782443, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.242570] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Created folder: Project (ed5b204b46f445e4a12938af770ecbb5) in parent group-v559223. [ 872.242761] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Creating folder: Instances. Parent ref: group-v559407. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.243015] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44d1de6b-9434-4c90-b4c5-1c61368724e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.257394] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Created folder: Instances in parent group-v559407. [ 872.257667] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 872.257871] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.258190] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b97cf621-986b-48ec-a9a7-3f22a965aa22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.281567] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.281567] env[68233]: value = "task-2782447" [ 872.281567] env[68233]: _type = "Task" [ 872.281567] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.293331] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782447, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.298063] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 19a1441d-9621-4e6e-ac38-8ad08206facf] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 872.555023] env[68233]: INFO nova.compute.manager [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Took 54.83 seconds to build instance. [ 872.624728] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.741637] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782443, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559825} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.742502] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 872.742776] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 872.743129] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-accae367-f63f-4a8b-8c22-ee0f23f32bee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.752733] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 872.752733] env[68233]: value = "task-2782448" [ 872.752733] env[68233]: _type = "Task" [ 872.752733] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.765937] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782448, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.792834] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782447, 'name': CreateVM_Task, 'duration_secs': 0.490528} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.793039] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.793778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.793953] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.794326] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 872.794596] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b23df82e-5a60-4655-bad8-c2748459cd30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.800893] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 872.800893] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c155e0-ac79-5064-cceb-401139d78ec5" [ 872.800893] env[68233]: _type = "Task" [ 872.800893] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.801429] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: d19421ad-88d5-4479-a6e4-c6d59e863b31] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 872.814424] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c155e0-ac79-5064-cceb-401139d78ec5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.924853] env[68233]: DEBUG nova.compute.manager [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 872.926496] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a82b5c9a-b93c-40ba-a6f8-78e240a2874d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.057009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c176000e-9956-4daa-8de5-222145a2d2a8 tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.545s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.130527] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782444, 'name': ReconfigVM_Task, 'duration_secs': 0.653955} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.130827] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Reconfigured VM instance instance-00000043 to attach disk [datastore2] bb59f959-4cf8-4244-b7b4-6bf630a616b3/bb59f959-4cf8-4244-b7b4-6bf630a616b3.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.131568] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82e353e6-1288-4f25-9271-c29fc6ac65bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.140732] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 873.140732] env[68233]: value = "task-2782449" [ 873.140732] env[68233]: _type = "Task" [ 873.140732] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.152757] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782449, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.165413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.229s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.172619] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.277s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.174321] env[68233]: INFO nova.compute.claims [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.189862] env[68233]: DEBUG nova.network.neutron [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updated VIF entry in instance network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.189862] env[68233]: DEBUG nova.network.neutron [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.201552] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.205019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.205019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.205019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.205019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.206227] env[68233]: INFO nova.compute.manager [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Terminating instance [ 873.266483] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782448, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069961} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.267858] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Successfully updated port: 85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.269329] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 873.270827] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1fb55c-5c4c-40b9-b9f5-49f514579c89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.296046] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 873.297024] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41497272-7e19-48ac-8f44-0b3cc73e0952 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.314030] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: a340c66c-74eb-43e5-8e72-54d9c8b07a26] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 873.326571] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c155e0-ac79-5064-cceb-401139d78ec5, 'name': SearchDatastore_Task, 'duration_secs': 0.017383} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.327993] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.328695] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 873.328695] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.328965] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.329505] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 873.329665] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 873.329665] env[68233]: value = "task-2782450" [ 873.329665] env[68233]: _type = "Task" [ 873.329665] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.329815] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c17e9524-b807-49f0-8dd4-6be4d196781d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.341136] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782450, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.343099] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 873.343275] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 873.344274] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bc5ac89-2a12-4fcd-ac2a-5f8796100685 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.351154] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 873.351154] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260274a-6a92-901d-9261-31f4ef2671aa" [ 873.351154] env[68233]: _type = "Task" [ 873.351154] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.360626] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260274a-6a92-901d-9261-31f4ef2671aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.408909] env[68233]: DEBUG nova.compute.manager [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Received event network-vif-plugged-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 873.409319] env[68233]: DEBUG oslo_concurrency.lockutils [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] Acquiring lock "22c06baf-6316-4531-8037-b8b77c401596-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.409558] env[68233]: DEBUG oslo_concurrency.lockutils [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] Lock "22c06baf-6316-4531-8037-b8b77c401596-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.409737] env[68233]: DEBUG oslo_concurrency.lockutils [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] Lock "22c06baf-6316-4531-8037-b8b77c401596-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.409912] env[68233]: DEBUG nova.compute.manager [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] No waiting events found dispatching network-vif-plugged-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 873.410098] env[68233]: WARNING nova.compute.manager [req-b22fc625-ca93-4eeb-9a6e-d48855e267c1 req-d6aa1bfa-eb63-4173-9113-a7171ad15c91 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Received unexpected event network-vif-plugged-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 for instance with vm_state building and task_state spawning. [ 873.441764] env[68233]: INFO nova.compute.manager [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] instance snapshotting [ 873.444833] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750a91e7-2e0c-4b95-a830-0e39ad1e34c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.469114] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a239c3-763a-48b2-b829-b050bac1c002 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.560894] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 873.652116] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782449, 'name': Rename_Task, 'duration_secs': 0.168658} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.652613] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.652728] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ece2843e-ae67-4247-8be6-dbc771e973d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.661383] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 873.661383] env[68233]: value = "task-2782451" [ 873.661383] env[68233]: _type = "Task" [ 873.661383] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.676089] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.693586] env[68233]: DEBUG oslo_concurrency.lockutils [req-6f858342-759f-4c94-8df1-3e232c782609 req-5e0c91fa-b79b-4b7a-b20a-6be828d4fc2a service nova] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.713268] env[68233]: DEBUG nova.compute.manager [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 873.713565] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 873.714670] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f73fd67-b3be-4695-a802-e9975e511a1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.724214] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 873.724393] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8ba3bab-056c-48a0-b7f9-b91ee2570ea0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.732050] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 873.732050] env[68233]: value = "task-2782452" [ 873.732050] env[68233]: _type = "Task" [ 873.732050] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.742608] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.771225] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.771472] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.771669] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.820753] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 35cbc15b-48d8-4acd-a957-eec3421df1ce] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 873.843226] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782450, 'name': ReconfigVM_Task, 'duration_secs': 0.337872} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.843583] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Reconfigured VM instance instance-00000016 to attach disk [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd/c6a358b7-0e6a-43bb-a171-5e6175f947bd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.844313] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1b307ae-9251-47a3-bacc-ee743f540651 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.853108] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 873.853108] env[68233]: value = "task-2782453" [ 873.853108] env[68233]: _type = "Task" [ 873.853108] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.870910] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782453, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.871254] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5260274a-6a92-901d-9261-31f4ef2671aa, 'name': SearchDatastore_Task, 'duration_secs': 0.015366} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.873034] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5f88c84-7609-464f-922b-68f7d36fc7db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.878439] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 873.879355] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a3f4a8-a814-4be4-ba2f-f6c2cb308374 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.883493] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 873.883493] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52761841-9b86-440f-97cb-21f04c596a42" [ 873.883493] env[68233]: _type = "Task" [ 873.883493] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.888418] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 873.888581] env[68233]: ERROR oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk due to incomplete transfer. [ 873.889147] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e1b2ffa8-0c38-43c2-8bc8-011f8a2d8339 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.894197] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52761841-9b86-440f-97cb-21f04c596a42, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.897193] env[68233]: DEBUG oslo_vmware.rw_handles [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/527487f1-bd3d-d2fb-7261-67a223b1afad/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 873.897607] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Uploaded image fa91d76b-2020-4abf-8837-92f1504eee4f to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 873.898944] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 873.899469] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ddfb188d-ee55-40ec-90b6-232d085814e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.906602] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 873.906602] env[68233]: value = "task-2782454" [ 873.906602] env[68233]: _type = "Task" [ 873.906602] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.915980] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782454, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.982259] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 873.982693] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ef9d681b-360d-4d06-8db4-89fa2bfdd77a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.992011] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 873.992011] env[68233]: value = "task-2782455" [ 873.992011] env[68233]: _type = "Task" [ 873.992011] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.005541] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782455, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.085821] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.173545] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782451, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.245789] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782452, 'name': PowerOffVM_Task, 'duration_secs': 0.308571} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.247109] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 874.247319] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 874.247588] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b38e598-f670-4d2e-a48d-f2963fc5d307 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.315214] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.324657] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 38c86c2b-9b2b-482e-b26d-066208467202] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 874.345564] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 874.345690] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 874.345874] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Deleting the datastore file [datastore2] 2c219b8c-813d-4155-af3b-327a7ebd75fc {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 874.348508] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22d871f8-b85e-489b-a574-dba5f7fe6ab7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.359431] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for the task: (returnval){ [ 874.359431] env[68233]: value = "task-2782457" [ 874.359431] env[68233]: _type = "Task" [ 874.359431] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.375845] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782457, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.378747] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782453, 'name': Rename_Task, 'duration_secs': 0.195688} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.381310] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 874.381743] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5e22eaa-a6cb-47d5-ab87-8960732ea30c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.392097] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 874.392097] env[68233]: value = "task-2782458" [ 874.392097] env[68233]: _type = "Task" [ 874.392097] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.400747] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52761841-9b86-440f-97cb-21f04c596a42, 'name': SearchDatastore_Task, 'duration_secs': 0.015809} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.400747] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.400747] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/3d759f4f-3845-4bb5-8cfa-639b7023bb27.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 874.401180] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86c96730-e3c2-4a51-a4f7-d7562125fddc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.406358] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782458, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.413108] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 874.413108] env[68233]: value = "task-2782459" [ 874.413108] env[68233]: _type = "Task" [ 874.413108] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.419635] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782454, 'name': Destroy_Task, 'duration_secs': 0.39366} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.420268] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Destroyed the VM [ 874.420511] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 874.420754] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0484c9bd-7364-4e58-9740-407f23acf22b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.428279] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.433143] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 874.433143] env[68233]: value = "task-2782460" [ 874.433143] env[68233]: _type = "Task" [ 874.433143] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.443184] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782460, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.502787] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782455, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.591158] env[68233]: DEBUG nova.network.neutron [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Updating instance_info_cache with network_info: [{"id": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "address": "fa:16:3e:5d:8a:45", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85347f9f-ac", "ovs_interfaceid": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.676047] env[68233]: DEBUG oslo_vmware.api [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782451, 'name': PowerOnVM_Task, 'duration_secs': 0.531908} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.678921] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.679166] env[68233]: INFO nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Took 8.21 seconds to spawn the instance on the hypervisor. 
[ 874.679363] env[68233]: DEBUG nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 874.680737] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18ed2b2-dae5-47af-983c-5f8724c1f62b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.724578] env[68233]: INFO nova.compute.manager [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Swapping old allocation on dict_keys(['51aa13e7-0977-4031-b209-4ae90c83752c']) held by migration c46ea15e-6075-47b3-b44f-d79f032a7b76 for instance [ 874.758501] env[68233]: DEBUG nova.scheduler.client.report [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Overwriting current allocation {'allocations': {'51aa13e7-0977-4031-b209-4ae90c83752c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 93}}, 'project_id': '8d6e2fbf0f9c4fb0bf99e71506798d7c', 'user_id': '673cdb014f4949baa648cb8d661293eb', 'consumer_generation': 1} on consumer 72467d49-6fa8-42db-871e-4e50e77eedf7 {{(pid=68233) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 874.763648] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fcf8b8-f63d-40dd-b804-b2b557bac4c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.774993] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b4ef57f-f4c9-43f3-bc6c-072c07ac6038 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.811458] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7932d92-7a1b-4f1f-820b-f0b8f3b488fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.824061] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cbc778-932e-4bc6-88a1-3c9c73b92974 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.831679] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9eeb90c6-6ac2-43cb-887a-b69a28dc43a6] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 874.841449] env[68233]: DEBUG nova.compute.provider_tree [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.862281] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 
tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.862520] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquired lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.862703] env[68233]: DEBUG nova.network.neutron [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.875669] env[68233]: DEBUG oslo_vmware.api [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Task: {'id': task-2782457, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287154} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.876740] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 874.876944] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 874.877163] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 874.877359] env[68233]: INFO nova.compute.manager [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Took 1.16 seconds to destroy the instance on the hypervisor. [ 874.877602] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 874.878125] env[68233]: DEBUG nova.compute.manager [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 874.878258] env[68233]: DEBUG nova.network.neutron [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 874.905705] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782458, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.924805] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782459, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.944722] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782460, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.004644] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782455, 'name': CreateSnapshot_Task, 'duration_secs': 0.955591} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.004947] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 875.005792] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33debecb-7591-42e9-bf8c-8088e3597fa3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.095637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.096068] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Instance network_info: |[{"id": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "address": "fa:16:3e:5d:8a:45", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85347f9f-ac", "ovs_interfaceid": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 875.096570] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:8a:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85347f9f-ac3e-42ee-a5bd-bf41164a6cc1', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.106446] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 
tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.106446] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.106446] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-08273c88-5da4-4554-ad9b-111935d33beb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.139421] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.139421] env[68233]: value = "task-2782461" [ 875.139421] env[68233]: _type = "Task" [ 875.139421] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.153925] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782461, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.203168] env[68233]: INFO nova.compute.manager [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Took 54.74 seconds to build instance. [ 875.344110] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 68a4e635-381d-4dc2-879c-5581cd5e189a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 875.352119] env[68233]: DEBUG nova.scheduler.client.report [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.404642] env[68233]: DEBUG oslo_vmware.api [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782458, 'name': PowerOnVM_Task, 'duration_secs': 0.794259} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.404906] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 875.405185] env[68233]: DEBUG nova.compute.manager [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 875.405972] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b249498f-c39c-4510-b973-fca73a94c02a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.429688] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648791} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.429950] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/3d759f4f-3845-4bb5-8cfa-639b7023bb27.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 875.430177] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 875.430422] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed93ae38-1af4-40ac-92ac-34ceb187106f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.440035] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 875.440035] env[68233]: value = "task-2782462" [ 875.440035] env[68233]: _type = "Task" [ 875.440035] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.448952] env[68233]: DEBUG oslo_vmware.api [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782460, 'name': RemoveSnapshot_Task, 'duration_secs': 0.805932} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.450701] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 875.451158] env[68233]: INFO nova.compute.manager [None req-68dc0a54-9db2-460c-a247-e28db978aed4 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Took 14.97 seconds to snapshot the instance on the hypervisor. [ 875.459646] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782462, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.530457] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 875.531316] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-947af232-ab2c-4598-ba6e-70828ede45d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.544428] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 875.544428] env[68233]: value = "task-2782463" [ 875.544428] env[68233]: _type = "Task" [ 875.544428] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.555878] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782463, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.651665] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782461, 'name': CreateVM_Task, 'duration_secs': 0.411381} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.653262] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.655936] env[68233]: DEBUG nova.compute.manager [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Received event network-changed-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 875.656281] env[68233]: DEBUG nova.compute.manager [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Refreshing instance network info cache due to event network-changed-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 875.656670] env[68233]: DEBUG oslo_concurrency.lockutils [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] Acquiring lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.656965] env[68233]: DEBUG oslo_concurrency.lockutils [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] Acquired lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.657246] env[68233]: DEBUG nova.network.neutron [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Refreshing network info cache for port 85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.659084] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.659381] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.659871] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 875.660654] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c612266-1c05-433f-a6eb-e713d22ae3ad {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.669243] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 875.669243] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52433bb3-c5c4-e5e1-3e1c-f153a37d1acc" [ 875.669243] env[68233]: _type = "Task" [ 875.669243] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.680060] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52433bb3-c5c4-e5e1-3e1c-f153a37d1acc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.705941] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7472a6b3-1e41-417c-afb7-a6c20e51ff01 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.339s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.792085] env[68233]: DEBUG nova.network.neutron [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [{"id": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "address": "fa:16:3e:54:02:42", "network": {"id": "22c86a97-31f3-4248-92bf-9701cfcfc68a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.201", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b18dbcbda07a49409f8351bc3bf7427c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56b944d8-803d-43f2-945d-0f334ee4ea1c", "external-id": "nsx-vlan-transportzone-799", "segmentation_id": 799, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1c30459d-e8", "ovs_interfaceid": "1c30459d-e88b-42bd-8073-04aa89cecbc3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.840617] env[68233]: DEBUG nova.network.neutron [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.853344] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 102187bd-0cb2-4496-8dd0-9101b24ee4fa] Instance has had 0 of 5 cleanup 
attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 875.855541] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.683s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.856013] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 875.861166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.587s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.861166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.861964] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.209s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 875.864775] env[68233]: INFO nova.compute.claims [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.887346] env[68233]: INFO nova.scheduler.client.report [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Deleted allocations for instance f2af60e6-496c-4edb-9e99-4b45fa94bfeb [ 875.931189] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.951254] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782462, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080578} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.951503] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 875.954761] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff11b47a-3129-457e-8b6d-f0df13569e04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.983949] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/3d759f4f-3845-4bb5-8cfa-639b7023bb27.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 875.984377] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d118df2a-3a2b-41ac-8f45-464675ee52fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.007297] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 876.007297] env[68233]: value = "task-2782464" [ 876.007297] env[68233]: _type = "Task" [ 876.007297] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.017261] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782464, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.026909] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.027188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.055408] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782463, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.180693] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52433bb3-c5c4-e5e1-3e1c-f153a37d1acc, 'name': SearchDatastore_Task, 'duration_secs': 0.016288} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.180693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.180693] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.180693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.180693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.180693] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.180693] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b248b8f-f387-40dd-9850-1dd80c0729e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.192033] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.192214] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.193070] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f667d48f-4497-486c-905e-f8125b3d59c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.201194] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 876.201194] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a4669c-42e8-b300-af5d-eb5b9b22281c" [ 876.201194] env[68233]: _type = "Task" [ 876.201194] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.209935] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a4669c-42e8-b300-af5d-eb5b9b22281c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.294729] env[68233]: DEBUG oslo_concurrency.lockutils [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Releasing lock "refresh_cache-72467d49-6fa8-42db-871e-4e50e77eedf7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.295203] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 876.295496] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed27e640-2179-49ee-8012-755714963a6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.304432] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 876.304432] env[68233]: value = "task-2782465" [ 876.304432] env[68233]: _type = "Task" [ 876.304432] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.314420] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.343829] env[68233]: INFO nova.compute.manager [-] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Took 1.47 seconds to deallocate network for instance. [ 876.362268] env[68233]: DEBUG nova.compute.utils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.363086] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 34889575-95ea-451c-aa59-49a5f30d4e4c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 876.367213] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.367375] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.396541] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23f6a0d6-ce97-4b85-9bbd-30fcb3076c84 tempest-SecurityGroupsTestJSON-2088234515 tempest-SecurityGroupsTestJSON-2088234515-project-member] Lock "f2af60e6-496c-4edb-9e99-4b45fa94bfeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.900s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.440507] env[68233]: DEBUG nova.policy [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32a9aa69d60843bba0e1b435bdf2454c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1f138d0135943fb87024b943f2dba25', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 876.518115] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782464, 'name': ReconfigVM_Task, 'duration_secs': 0.505463} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.520246] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/3d759f4f-3845-4bb5-8cfa-639b7023bb27.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.520966] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23100cc0-bc6d-4541-9e99-b65ceef672ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.528226] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 876.528226] env[68233]: value = "task-2782466" [ 876.528226] env[68233]: _type = "Task" [ 876.528226] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.531638] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 876.540808] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782466, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.557603] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782463, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.597796] env[68233]: DEBUG nova.network.neutron [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Updated VIF entry in instance network info cache for port 85347f9f-ac3e-42ee-a5bd-bf41164a6cc1. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.598176] env[68233]: DEBUG nova.network.neutron [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Updating instance_info_cache with network_info: [{"id": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "address": "fa:16:3e:5d:8a:45", "network": {"id": "bcf950f4-e3cd-4d0d-98d2-c2169047f3a6", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-2030262372-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d921fe0876de499dbc86529a00b2c6f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85347f9f-ac", "ovs_interfaceid": "85347f9f-ac3e-42ee-a5bd-bf41164a6cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.712363] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a4669c-42e8-b300-af5d-eb5b9b22281c, 'name': SearchDatastore_Task, 'duration_secs': 0.01394} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.713193] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-030ef316-4b1e-4799-a52c-e6c37bfb4bc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.723188] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 876.723188] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ea1ae-903c-9ea3-bde3-a208b7ebca75" [ 876.723188] env[68233]: _type = "Task" [ 876.723188] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.734161] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ea1ae-903c-9ea3-bde3-a208b7ebca75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.814937] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782465, 'name': PowerOffVM_Task, 'duration_secs': 0.230567} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.815244] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 876.815894] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:51:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='508f22af-e037-4878-8980-ab644bbabaa4',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1432814527',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 876.816132] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 876.816297] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 
tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 876.816479] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 876.816626] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 876.816772] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 876.816976] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 876.817149] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 876.817318] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 876.817508] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 876.817707] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 876.823126] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b02b584-93f5-4e31-a7d5-ade73ecfe5d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.840877] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 876.840877] env[68233]: value = "task-2782467" [ 876.840877] env[68233]: _type = "Task" [ 876.840877] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.851437] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.851714] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782467, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.871774] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 876.871774] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 6105602a-b8eb-4128-a492-b60a9468018f] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 876.941311] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Successfully created port: 4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.046842] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782466, 'name': Rename_Task, 'duration_secs': 0.226852} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.049934] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.050736] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-554b394d-4acc-4f6a-be26-a58f5a697e94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.059187] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782463, 'name': CloneVM_Task, 'duration_secs': 1.327509} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.060456] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Created linked-clone VM from snapshot [ 877.060768] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 877.060768] env[68233]: value = "task-2782468" [ 877.060768] env[68233]: _type = "Task" [ 877.060768] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.061488] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db710e1c-4c45-43fc-9dc1-23b3ad70d172 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.065663] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.081134] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Uploading image f91c1add-305a-4e51-ac18-5438e9f1375a {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 877.083442] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782468, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.103913] env[68233]: DEBUG oslo_concurrency.lockutils [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] Releasing lock "refresh_cache-22c06baf-6316-4531-8037-b8b77c401596" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.104430] env[68233]: DEBUG nova.compute.manager [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Received event network-vif-deleted-d5ff7193-47a7-4f21-8f05-413e7d813756 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 877.104634] env[68233]: INFO nova.compute.manager [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Neutron deleted interface d5ff7193-47a7-4f21-8f05-413e7d813756; detaching it from the instance and deleting it from the info cache [ 877.104809] env[68233]: DEBUG nova.network.neutron [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 877.123919] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 877.123919] env[68233]: value = "vm-559412" [ 877.123919] env[68233]: _type = "VirtualMachine" [ 877.123919] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 877.124688] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3714ab91-31af-4d5c-9138-2010da39b1ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.136884] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease: (returnval){ [ 877.136884] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234246c-a0a8-55ee-66fa-a3e1d53d3b54" [ 877.136884] env[68233]: _type = "HttpNfcLease" [ 877.136884] env[68233]: } obtained for exporting VM: (result){ [ 877.136884] env[68233]: value = "vm-559412" [ 877.136884] env[68233]: _type = "VirtualMachine" [ 877.136884] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 877.137316] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the lease: (returnval){ [ 877.137316] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234246c-a0a8-55ee-66fa-a3e1d53d3b54" [ 877.137316] env[68233]: _type = "HttpNfcLease" [ 877.137316] env[68233]: } to be ready. 
{{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 877.148028] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.148028] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234246c-a0a8-55ee-66fa-a3e1d53d3b54" [ 877.148028] env[68233]: _type = "HttpNfcLease" [ 877.148028] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 877.226621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "03688e90-5433-47ca-baaa-75861ad093b7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.226621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.226621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "03688e90-5433-47ca-baaa-75861ad093b7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.226621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.226621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.227608] env[68233]: INFO nova.compute.manager [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Terminating instance [ 877.242263] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525ea1ae-903c-9ea3-bde3-a208b7ebca75, 'name': SearchDatastore_Task, 'duration_secs': 0.024522} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.243268] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.243598] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 22c06baf-6316-4531-8037-b8b77c401596/22c06baf-6316-4531-8037-b8b77c401596.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.244121] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4550d1d-0d0d-4400-802c-bcd3a9e7edff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.254549] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 877.254549] env[68233]: value = "task-2782470" [ 877.254549] env[68233]: _type = "Task" [ 877.254549] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.266251] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.358174] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782467, 'name': ReconfigVM_Task, 'duration_secs': 0.149016} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.359386] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda5603e-bf51-4561-9fbf-77910e919f10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.364888] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edf17ed-5443-4950-8308-9028238c103a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.387055] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: e8ed7cfc-7ef1-4bc7-bebe-624f1ba69f67] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 877.388705] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:51:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='508f22af-e037-4878-8980-ab644bbabaa4',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1432814527',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.388926] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.389543] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.389770] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.390019] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.390159] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.390371] env[68233]: DEBUG nova.virt.hardware [None 
req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.390531] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.390697] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.390862] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.391043] env[68233]: DEBUG nova.virt.hardware [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.392641] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14f9f85d-38f3-4f10-b373-696d38a60132 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.401370] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfeee1df-3f8e-4849-80a4-d56390ee00ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.407191] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 877.407191] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5271b4aa-7d26-fe2a-6d89-dd0870857f77" [ 877.407191] env[68233]: _type = "Task" [ 877.407191] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.442449] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfbe8bda-9bbe-4316-b9f7-1e02555cc718 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.449449] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5271b4aa-7d26-fe2a-6d89-dd0870857f77, 'name': SearchDatastore_Task, 'duration_secs': 0.009102} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.455299] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfiguring VM instance instance-00000030 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 877.459133] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-13d917a5-645b-4e1e-9b64-aeb38c6b7063 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.472446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1749e65-8ac7-4f14-bceb-0adc53b3deee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.487966] env[68233]: DEBUG nova.compute.provider_tree [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.491239] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 877.491239] env[68233]: value = "task-2782471" [ 877.491239] env[68233]: _type = "Task" [ 877.491239] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.501262] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782471, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.576108] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782468, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.607989] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29619379-d01b-4939-a565-81ed80f44f70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.618555] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2baf37-07c0-4b69-beb6-feea28f079be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.658671] env[68233]: DEBUG nova.compute.manager [req-80f382ab-0be8-48c5-9e91-5ce970e7e921 req-80b64656-0ef7-4e03-bff0-bc03f12baf90 service nova] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Detach interface failed, port_id=d5ff7193-47a7-4f21-8f05-413e7d813756, reason: Instance 2c219b8c-813d-4155-af3b-327a7ebd75fc could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 877.661018] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 877.661018] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234246c-a0a8-55ee-66fa-a3e1d53d3b54" [ 877.661018] env[68233]: _type = "HttpNfcLease" [ 877.661018] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 877.661294] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 877.661294] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234246c-a0a8-55ee-66fa-a3e1d53d3b54" [ 877.661294] env[68233]: _type = "HttpNfcLease" [ 877.661294] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 877.662161] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d18ecb7-a9f6-443b-ad67-0a6d03303437 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.670120] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 877.670428] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 877.737175] env[68233]: DEBUG nova.compute.manager [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 877.737478] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 877.738527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9f2dcc-c49b-47c9-82a5-57c6d68ad5ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.746215] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 877.746494] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcee7fc9-9f25-44ce-a98e-c90147ae7fb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.753684] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 877.753684] env[68233]: value = "task-2782472" [ 877.753684] env[68233]: _type = "Task" [ 877.753684] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.768434] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782472, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.772381] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782470, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.816922] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-531de2b4-826c-4567-8f6b-b8a33693d2a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.894868] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 877.897608] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: eb5dc742-fa8f-4bac-89cb-afa57b5abe12] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 877.925191] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.925497] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.925662] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.928172] 
env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.928172] env[68233]: DEBUG nova.virt.hardware [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.928562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82da779a-e8d2-415c-82a7-6c9418ff27db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.937812] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb73c713-6e79-448b-a98f-d612578fd2b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.991107] env[68233]: DEBUG nova.scheduler.client.report [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.003681] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782471, 'name': ReconfigVM_Task, 'duration_secs': 0.265605} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.003974] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfigured VM instance instance-00000030 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 878.004802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5b53ab-5de2-4b5f-81a2-475951326e95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.029290] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.029923] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdbac9fa-2f60-43c6-8b9a-49ce1b34a860 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.049945] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 878.049945] env[68233]: value = "task-2782473" [ 878.049945] env[68233]: _type = "Task" [ 878.049945] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.059921] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782473, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.797947] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.798375] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.798573] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.798809] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.799025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.802195] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.940s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.804983] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 878.811019] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782468, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.811019] env[68233]: WARNING oslo_vmware.common.loopingcall [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] task run outlasted interval by 0.24121400000000004 sec [ 878.811019] env[68233]: INFO nova.compute.manager [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Terminating instance [ 878.820854] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 878.820854] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances with incomplete migration {{(pid=68233) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 878.822830] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.637s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.823146] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.825930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.695s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.826312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.829033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" 
:: waited 30.252s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.831242] env[68233]: INFO nova.compute.claims [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.851618] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.787877} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.859972] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 22c06baf-6316-4531-8037-b8b77c401596/22c06baf-6316-4531-8037-b8b77c401596.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.860228] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.860798] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782473, 'name': ReconfigVM_Task, 'duration_secs': 0.277471} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.861023] env[68233]: DEBUG oslo_vmware.api [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782468, 'name': PowerOnVM_Task, 'duration_secs': 1.116421} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.861286] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782472, 'name': PowerOffVM_Task, 'duration_secs': 0.20472} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.861496] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-040ed93d-bfa6-4d12-8ae9-27967ea1bdef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.863666] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7/72467d49-6fa8-42db-871e-4e50e77eedf7.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.864067] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 878.864276] env[68233]: INFO nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Took 9.55 seconds to spawn the instance on the hypervisor. [ 878.864470] env[68233]: DEBUG nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.864740] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 878.864900] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 878.865905] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c28d23-71fe-4e62-9627-6ba854b30ade {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.868987] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232fb9b6-a3ac-450c-bad6-e14a74ae5c43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.871891] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-301653f6-d4be-4cc4-b3f1-f9b8ca5ef967 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.874063] env[68233]: INFO nova.scheduler.client.report [None 
req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Deleted allocations for instance da2a5acb-0861-4225-a6b4-324482c480ea [ 878.880564] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 878.880564] env[68233]: value = "task-2782474" [ 878.880564] env[68233]: _type = "Task" [ 878.880564] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.900993] env[68233]: INFO nova.scheduler.client.report [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1 [ 878.908475] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d381e104-2395-4d4f-9d67-cd0c7a0fd7af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.917420] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.936420] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d32c5a8-7721-42fe-ad0d-e75336dafdfc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.960147] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d657e6a-5e0a-4f4b-af22-0085f73cb194 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.968680] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 878.968805] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78ea2ce3-6d82-479b-8bd3-306b7409e7df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.976598] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 878.976598] env[68233]: value = "task-2782476" [ 878.976598] env[68233]: _type = "Task" [ 878.976598] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.985319] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782476, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.991910] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 878.992308] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 878.992437] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore2] 03688e90-5433-47ca-baaa-75861ad093b7 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 878.992719] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3c1eb08-be42-49cc-877a-b7c671edfa28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.999468] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 878.999468] env[68233]: value = "task-2782477" [ 878.999468] env[68233]: _type = "Task" [ 878.999468] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.008891] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.238094] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Successfully updated port: 4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.245473] env[68233]: DEBUG nova.compute.manager [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Received event network-vif-plugged-4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 879.245792] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] Acquiring lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.246147] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.246390] env[68233]: DEBUG oslo_concurrency.lockutils [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.246667] env[68233]: DEBUG nova.compute.manager [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] No waiting events found dispatching network-vif-plugged-4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 879.246878] env[68233]: WARNING nova.compute.manager [req-f8faa206-f600-470c-a667-e6206b05a152 req-9d4b61f3-2939-47ba-9b5a-f01a47d53292 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Received unexpected event network-vif-plugged-4969d072-296f-454b-9621-58f95b90a8dd for instance with vm_state building and task_state spawning. [ 879.311410] env[68233]: DEBUG nova.compute.utils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.313555] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 879.313801] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.323201] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 879.343349] env[68233]: DEBUG nova.compute.manager [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.343560] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.344732] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73bd2fcf-66dc-4d7f-99b3-9caa646b9d76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.355107] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.358744] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e3d1680-0b7a-4659-bcb2-aafd796aa02e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.373074] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 879.373074] env[68233]: value = "task-2782478" [ 879.373074] env[68233]: _type = "Task" [ 879.373074] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.380790] env[68233]: DEBUG nova.policy [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95122ece8b8b445aa04349a675f262b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc7604c87d6485097fe5658d68217b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.390744] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782478, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.391606] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c8977fd6-f59c-4949-a68c-8b834957275c tempest-ServersTestMultiNic-1127264459 tempest-ServersTestMultiNic-1127264459-project-member] Lock "da2a5acb-0861-4225-a6b4-324482c480ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.324s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.408912] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076198} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.409943] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b08fc269-138d-4f38-b4c3-875a506052b0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3d94d1b6-ba04-407d-9398-d4f7b21a7ee1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.817s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.410947] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 879.413083] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f522fa47-35c1-4e1e-b8c4-e7870fde71b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.444497] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 22c06baf-6316-4531-8037-b8b77c401596/22c06baf-6316-4531-8037-b8b77c401596.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 879.445610] env[68233]: INFO nova.compute.manager [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Took 53.35 seconds to build instance. [ 879.449967] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59361487-7687-44fe-bc0b-8c08ccfa5a75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.469608] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd7edcf7-f41d-4a3f-9016-0e3e82dfb7c8 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.110s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.476765] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 879.476765] env[68233]: value = "task-2782479" [ 879.476765] env[68233]: _type = "Task" [ 879.476765] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.492964] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782479, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.496961] env[68233]: DEBUG oslo_vmware.api [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782476, 'name': PowerOnVM_Task, 'duration_secs': 0.402381} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.497441] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.510725] env[68233]: DEBUG oslo_vmware.api [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235754} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.511025] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 879.511719] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 879.511719] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 879.511822] env[68233]: INFO nova.compute.manager [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Took 1.77 seconds to destroy the instance on the hypervisor. [ 879.512109] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 879.512313] env[68233]: DEBUG nova.compute.manager [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 879.512415] env[68233]: DEBUG nova.network.neutron [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 879.742700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.742700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquired lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.742700] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 879.817885] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 879.891793] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782478, 'name': PowerOffVM_Task, 'duration_secs': 0.397262} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.891793] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.891793] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.891793] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ba179e3-9140-4a13-97d4-06c5723118e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.969228] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.969481] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.970226] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Deleting the datastore file [datastore2] bb59f959-4cf8-4244-b7b4-6bf630a616b3 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.970512] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb3f6372-1907-4a29-930b-5a769a555959 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.979134] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for the task: (returnval){ [ 879.979134] env[68233]: value = "task-2782481" [ 879.979134] env[68233]: _type = "Task" [ 879.979134] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.996140] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782479, 'name': ReconfigVM_Task, 'duration_secs': 0.330254} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.000704] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 22c06baf-6316-4531-8037-b8b77c401596/22c06baf-6316-4531-8037-b8b77c401596.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 880.002185] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782481, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.002645] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b3b32de-533e-4e66-b901-b78364c546c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.019393] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 880.019393] env[68233]: value = "task-2782482" [ 880.019393] env[68233]: _type = "Task" [ 880.019393] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.031215] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782482, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.078563] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Successfully created port: ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.339679] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.464804] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ec5a7b-0bc9-4faa-b683-85dcf4b5709e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.476122] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcef278-5b5e-4bca-bb5d-c0d004f0e1c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.515444] env[68233]: INFO nova.compute.manager [None req-54791146-0a2b-4298-bcfe-b87eccb0e5a4 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance to original state: 'active' [ 880.522286] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec714a5-a179-4b21-a9a9-bf6f590fb564 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.537386] env[68233]: DEBUG oslo_vmware.api [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Task: {'id': task-2782481, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201902} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.543360] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.543614] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.543793] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.545492] env[68233]: INFO nova.compute.manager [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Took 1.20 seconds to destroy the instance on the hypervisor. [ 880.545805] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.546067] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782482, 'name': Rename_Task, 'duration_secs': 0.164995} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.546307] env[68233]: DEBUG nova.network.neutron [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.551027] env[68233]: DEBUG nova.compute.manager [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.551027] env[68233]: DEBUG nova.network.neutron [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.551027] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.552065] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a5c2c2-4466-4bb0-868e-854e2dfee132 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.556952] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-56f4a655-bcb9-48aa-9c0b-a97e611fd0cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.569879] env[68233]: DEBUG nova.compute.provider_tree [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.573700] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 880.573700] env[68233]: value = "task-2782483" [ 880.573700] env[68233]: _type = "Task" [ 880.573700] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.583154] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782483, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.711612] env[68233]: INFO nova.compute.manager [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Rescuing [ 880.711983] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.712213] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.712428] env[68233]: DEBUG nova.network.neutron [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.825255] env[68233]: DEBUG nova.network.neutron [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updating instance_info_cache with network_info: [{"id": "4969d072-296f-454b-9621-58f95b90a8dd", "address": "fa:16:3e:84:f6:ff", "network": {"id": "948796e1-4455-4621-a9ee-11fdd0b747af", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-380200332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1f138d0135943fb87024b943f2dba25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4969d072-29", "ovs_interfaceid": "4969d072-296f-454b-9621-58f95b90a8dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.829478] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 880.859737] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 880.859987] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.860161] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 880.860428] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.860512] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 880.860628] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 880.861039] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 880.861406] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 880.861512] env[68233]: DEBUG nova.virt.hardware [None 
req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 880.861692] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 880.861868] env[68233]: DEBUG nova.virt.hardware [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 880.863203] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd38e42-b2d5-4d37-b708-fcdf24e67f24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.871839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12871d4a-5228-4966-ae09-683484222d83 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.057427] env[68233]: INFO nova.compute.manager [-] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Took 1.54 seconds to deallocate network for instance. [ 881.075603] env[68233]: DEBUG nova.scheduler.client.report [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 881.090918] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782483, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.332022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Releasing lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 881.332022] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Instance network_info: |[{"id": "4969d072-296f-454b-9621-58f95b90a8dd", "address": "fa:16:3e:84:f6:ff", "network": {"id": "948796e1-4455-4621-a9ee-11fdd0b747af", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-380200332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1f138d0135943fb87024b943f2dba25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4969d072-29", "ovs_interfaceid": "4969d072-296f-454b-9621-58f95b90a8dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 881.332022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:f6:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4969d072-296f-454b-9621-58f95b90a8dd', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 881.338751] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Creating folder: Project (f1f138d0135943fb87024b943f2dba25). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.339314] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-438a26a5-9012-4dbf-ac33-5791c3c68d29 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.351861] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Created folder: Project (f1f138d0135943fb87024b943f2dba25) in parent group-v559223. [ 881.354019] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Creating folder: Instances. Parent ref: group-v559413. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 881.354019] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb385a12-a599-488d-ada5-4689dd5c46b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.362852] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Created folder: Instances in parent group-v559413. [ 881.365016] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 881.365016] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 881.365016] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-440a0b00-0d6e-40a5-ace1-b277f6f3eb07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.387023] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 881.387023] env[68233]: value = "task-2782486" [ 881.387023] env[68233]: _type = "Task" [ 881.387023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.394510] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782486, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.567856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.586028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.757s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.586458] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 881.589972] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.895s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.591924] env[68233]: INFO nova.compute.claims [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 881.600478] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782483, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.648810] env[68233]: DEBUG nova.network.neutron [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.700803] env[68233]: DEBUG nova.compute.manager [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Received event network-changed-4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 881.701238] env[68233]: DEBUG nova.compute.manager [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Refreshing instance network info cache due to event network-changed-4969d072-296f-454b-9621-58f95b90a8dd. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 881.701508] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] Acquiring lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.701692] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] Acquired lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.701933] env[68233]: DEBUG nova.network.neutron [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Refreshing network info cache for port 4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 881.719829] env[68233]: DEBUG nova.network.neutron [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.812660] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3f79709a-b8b7-4838-8731-d051155ff4f3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.813022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.897817] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782486, 'name': CreateVM_Task, 'duration_secs': 0.485101} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.898165] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.898705] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.898861] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 881.899238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 881.899504] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-466ab94d-ccf3-42d4-80bc-ffb4be4cd3da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.904603] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 881.904603] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524b964c-59bd-e4c9-f8c8-b65eba6fd6b4" [ 881.904603] env[68233]: _type = "Task" [ 881.904603] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.914806] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524b964c-59bd-e4c9-f8c8-b65eba6fd6b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.093235] env[68233]: DEBUG oslo_vmware.api [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782483, 'name': PowerOnVM_Task, 'duration_secs': 1.315714} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.094291] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 882.094291] env[68233]: INFO nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Took 10.14 seconds to spawn the instance on the hypervisor. [ 882.094291] env[68233]: DEBUG nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.094887] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7607e3-ea32-4f08-9421-e00c218fd0b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.099793] env[68233]: DEBUG nova.compute.utils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 882.103996] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 882.104331] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 882.153532] env[68233]: INFO nova.compute.manager [-] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Took 1.61 seconds to deallocate network for instance. 
[ 882.184071] env[68233]: DEBUG nova.policy [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ce9112ab6ee4f9f87bd665884da6c1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f693e1f45b0d4fc0b871ae4dd2df6c4e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 882.227158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.315780] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.418470] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524b964c-59bd-e4c9-f8c8-b65eba6fd6b4, 'name': SearchDatastore_Task, 'duration_secs': 0.017826} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.418803] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.419591] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 882.419591] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.419591] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.419818] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.419973] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b385fdb-4b1d-4972-9d27-e725f1c2ffd6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.430094] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.430340] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 882.432243] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a3e6519-51f0-4447-8627-e6664ed37662 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.438866] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 882.438866] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5217350b-d19e-87a6-c657-004242e336dc" [ 882.438866] env[68233]: _type = "Task" [ 882.438866] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.449879] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5217350b-d19e-87a6-c657-004242e336dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.604594] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 882.630098] env[68233]: INFO nova.compute.manager [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Took 51.47 seconds to build instance. 
[ 882.666266] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.815812] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Successfully updated port: ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.838939] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.928892] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "72467d49-6fa8-42db-871e-4e50e77eedf7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.929572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.929572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.929751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.930176] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.936404] env[68233]: INFO nova.compute.manager [None 
req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Terminating instance [ 882.954756] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5217350b-d19e-87a6-c657-004242e336dc, 'name': SearchDatastore_Task, 'duration_secs': 0.01243} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.957217] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d81d6337-3019-4599-bc2d-f1e8718a127b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.964087] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 882.964087] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525e4eca-1c53-c40d-1e6f-74c3c417e7b9" [ 882.964087] env[68233]: _type = "Task" [ 882.964087] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.976260] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525e4eca-1c53-c40d-1e6f-74c3c417e7b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.049328] env[68233]: DEBUG nova.network.neutron [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updated VIF entry in instance network info cache for port 4969d072-296f-454b-9621-58f95b90a8dd. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 883.051200] env[68233]: DEBUG nova.network.neutron [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updating instance_info_cache with network_info: [{"id": "4969d072-296f-454b-9621-58f95b90a8dd", "address": "fa:16:3e:84:f6:ff", "network": {"id": "948796e1-4455-4621-a9ee-11fdd0b747af", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-380200332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1f138d0135943fb87024b943f2dba25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4969d072-29", "ovs_interfaceid": "4969d072-296f-454b-9621-58f95b90a8dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.134630] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96e7e5f-a404-4069-810c-05331ea6e400 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "22c06baf-6316-4531-8037-b8b77c401596" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.267s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.201655] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2f4c86-ad5d-42d6-844e-0ae9e7d75b77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.211560] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6626aa2-1ca3-4091-bc9d-392cda25aea1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.245239] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93eb8931-224d-4ba2-830a-8a145305e427 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.253799] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f882eb38-7601-4c91-9026-da2e4eef4570 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.274017] env[68233]: DEBUG nova.compute.provider_tree [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 883.277092] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Successfully created port: f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.318291] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.318713] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.319044] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.443586] env[68233]: DEBUG nova.compute.manager [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.443732] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.444647] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbd2963-c66b-47a3-9b19-19075069c397 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.452729] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.452985] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52bcd293-b96a-444f-acaf-258671d6e268 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.461276] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 883.461276] env[68233]: value = "task-2782487" [ 883.461276] env[68233]: _type = "Task" [ 883.461276] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.480239] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.480405] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525e4eca-1c53-c40d-1e6f-74c3c417e7b9, 'name': SearchDatastore_Task, 'duration_secs': 0.023} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.480708] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.480819] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] d0d6eed0-db5b-4371-8f03-b3415fd833f0/d0d6eed0-db5b-4371-8f03-b3415fd833f0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 883.481339] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-875cc461-3551-4880-a7aa-f42d0b98a5d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.488734] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 883.488734] env[68233]: value = "task-2782488" [ 883.488734] env[68233]: _type = "Task" [ 883.488734] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.497559] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782488, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.555987] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] Releasing lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.555987] env[68233]: DEBUG nova.compute.manager [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Received event network-vif-deleted-1d7d2812-9e77-463b-be77-4b04e6365498 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.555987] env[68233]: DEBUG nova.compute.manager [req-fd8df5a5-75fd-4cd9-af3e-7329020e0b62 req-fbcec528-e16d-47b3-ad9d-13258c2e80b0 service nova] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Received event network-vif-deleted-e3cadc37-6c3d-4281-982a-738d06c7f97a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.616209] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 883.652694] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 883.652927] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 883.653115] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 883.653293] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 883.653460] env[68233]: DEBUG nova.virt.hardware 
[None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 883.653648] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 883.653904] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 883.654095] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 883.654286] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 883.654805] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 883.654805] env[68233]: DEBUG nova.virt.hardware [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 883.657517] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f72022-f61c-41b5-94e4-ce13013d49a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.669505] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa66c839-bd28-45b0-8327-066a861c2975 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.781292] env[68233]: DEBUG nova.scheduler.client.report [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.784878] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.785158] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3d1f830-bed7-45a0-8f0d-745d4ab36bf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.794239] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 883.794239] env[68233]: value = "task-2782489" [ 883.794239] env[68233]: _type = "Task" [ 883.794239] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.805934] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782489, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.854728] env[68233]: DEBUG nova.compute.manager [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Received event network-vif-plugged-ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.854955] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Acquiring lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.855205] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.855401] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.855531] env[68233]: DEBUG nova.compute.manager [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] No waiting events found dispatching network-vif-plugged-ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
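The event records here illustrate the external-event handshake behind pop_instance_event: the compute node expects a network-vif-plugged-<port> notification from Neutron, and an event that arrives before any waiter has been registered is dispatched to nobody, producing the "No waiting events found" line and the WARNING that follows. A rough sketch of that register-then-dispatch pattern is below, using a simple threading-based registry; the class and method names are illustrative, not Nova's.

```python
# Sketch of a waiter registry for externally delivered instance events,
# mirroring the "No waiting events found dispatching network-vif-plugged-..."
# and "Received unexpected event ..." records above. Names are hypothetical.
import threading
from collections import defaultdict


class InstanceEventRegistry:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid: str, event_name: str) -> threading.Event:
        """Register interest in an event before starting the operation that needs it."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid: str, event_name: str) -> bool:
        """Deliver an external event; return False if nobody was waiting for it."""
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            print(f"WARNING: unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True


if __name__ == "__main__":
    reg = InstanceEventRegistry()
    uuid = "4677d047-f8dc-4501-be9b-14e6a2222f46"
    port_event = "network-vif-plugged-ef237162-2628-4a17-9afd-7a418911f222"
    # Event arrives before anyone registered a waiter -> "unexpected", as in the log.
    reg.dispatch(uuid, port_event)
    waiter = reg.prepare(uuid, port_event)
    reg.dispatch(uuid, port_event)
    print("waiter signalled:", waiter.is_set())
```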
883.855776] env[68233]: WARNING nova.compute.manager [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Received unexpected event network-vif-plugged-ef237162-2628-4a17-9afd-7a418911f222 for instance with vm_state building and task_state spawning. [ 883.855893] env[68233]: DEBUG nova.compute.manager [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Received event network-changed-ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 883.856198] env[68233]: DEBUG nova.compute.manager [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Refreshing instance network info cache due to event network-changed-ef237162-2628-4a17-9afd-7a418911f222. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 883.857756] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Acquiring lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.905926] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.971435] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782487, 'name': PowerOffVM_Task, 'duration_secs': 0.240229} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.971756] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.971887] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.972168] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42be86b8-5f5c-476c-85f7-783a01a92857 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.002742] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782488, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.230248] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.230629] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.231099] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleting the datastore file [datastore1] 72467d49-6fa8-42db-871e-4e50e77eedf7 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.231099] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-84b9bf92-aa8f-4fbe-8517-f8802e060028 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.237960] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 884.237960] env[68233]: value = "task-2782491" [ 884.237960] env[68233]: _type = "Task" [ 884.237960] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.248607] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782491, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.292714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.701s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.292714] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 884.297598] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.487s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.299213] env[68233]: INFO nova.compute.claims [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 884.314400] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782489, 'name': PowerOffVM_Task, 'duration_secs': 0.483982} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.314400] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 884.314694] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a30ca3b-620d-4591-beac-914be876993e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.340738] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "22c06baf-6316-4531-8037-b8b77c401596" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.341082] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "22c06baf-6316-4531-8037-b8b77c401596" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.341294] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "22c06baf-6316-4531-8037-b8b77c401596-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.341495] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock 
"22c06baf-6316-4531-8037-b8b77c401596-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.341675] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "22c06baf-6316-4531-8037-b8b77c401596-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.344316] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae05fe4-cf6e-480a-86e2-65fda20e699c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.348144] env[68233]: INFO nova.compute.manager [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Terminating instance [ 884.395739] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.396121] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57c897f5-31a5-413e-8d2b-d7ae8e16a5de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.403616] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 884.403616] env[68233]: value = "task-2782492" [ 884.403616] env[68233]: _type = "Task" [ 884.403616] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.408538] env[68233]: DEBUG nova.network.neutron [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updating instance_info_cache with network_info: [{"id": "ef237162-2628-4a17-9afd-7a418911f222", "address": "fa:16:3e:33:8a:2c", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef237162-26", "ovs_interfaceid": "ef237162-2628-4a17-9afd-7a418911f222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.418117] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 884.418117] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.418117] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.418117] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.418117] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 
tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.418399] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b74fa2d3-ffd3-4fea-86e3-9c2851c5f871 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.428661] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.428877] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.429712] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6b8566e-7ae8-4c42-98f6-01cc41d1fe79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.436417] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 884.436417] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b61429-222c-e2ce-709d-cd6ee6ae4dc1" [ 884.436417] env[68233]: _type = "Task" [ 884.436417] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.447287] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b61429-222c-e2ce-709d-cd6ee6ae4dc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.500976] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782488, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.753514} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.501326] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] d0d6eed0-db5b-4371-8f03-b3415fd833f0/d0d6eed0-db5b-4371-8f03-b3415fd833f0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 884.501547] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 884.501803] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9aa9a2d0-7e90-433b-9bc5-2d5c5f621bcb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.508462] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 884.508462] env[68233]: value = "task-2782493" [ 884.508462] env[68233]: _type = "Task" [ 884.508462] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.519790] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.751465] env[68233]: DEBUG oslo_vmware.api [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782491, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228266} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.751726] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.751901] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.752111] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.752289] env[68233]: INFO nova.compute.manager [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Took 1.31 seconds to destroy the instance on the hypervisor. [ 884.752552] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.752743] env[68233]: DEBUG nova.compute.manager [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 884.752836] env[68233]: DEBUG nova.network.neutron [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 884.802749] env[68233]: DEBUG nova.compute.utils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 884.804136] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 884.804335] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 884.856777] env[68233]: DEBUG nova.compute.manager [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 884.857040] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 884.857940] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a13285-02a6-4ab9-8cfd-638b6c1ca9d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.865794] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 884.866012] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b733d446-1821-48e8-8a65-e2eec5b59146 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.873856] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for the task: (returnval){ [ 884.873856] env[68233]: value = "task-2782494" [ 884.873856] env[68233]: _type = "Task" [ 884.873856] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.884092] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782494, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.916104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.916104] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance network_info: |[{"id": "ef237162-2628-4a17-9afd-7a418911f222", "address": "fa:16:3e:33:8a:2c", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef237162-26", "ovs_interfaceid": "ef237162-2628-4a17-9afd-7a418911f222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 884.916104] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Acquired lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.916104] env[68233]: DEBUG nova.network.neutron [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Refreshing network info cache for port ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 884.916104] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:8a:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef237162-2628-4a17-9afd-7a418911f222', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.923261] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 
tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 884.924388] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.924719] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e00e6ff0-be6e-4246-a8ce-ba0d1a145324 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.944572] env[68233]: DEBUG nova.policy [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 884.959430] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b61429-222c-e2ce-709d-cd6ee6ae4dc1, 'name': SearchDatastore_Task, 'duration_secs': 0.012628} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.961383] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.961383] env[68233]: value = "task-2782495" [ 884.961383] env[68233]: _type = "Task" [ 884.961383] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.961592] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae20c817-074c-422a-a7d7-b1dac9b80cf8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.974847] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 884.974847] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209e472-ab56-ca4e-384d-6dbe66eab391" [ 884.974847] env[68233]: _type = "Task" [ 884.974847] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.978120] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782495, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.986097] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209e472-ab56-ca4e-384d-6dbe66eab391, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.020124] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12013} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.020508] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 885.021600] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95ad1ae-7768-42fb-86b7-ed3ec85530f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.050241] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] d0d6eed0-db5b-4371-8f03-b3415fd833f0/d0d6eed0-db5b-4371-8f03-b3415fd833f0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 885.050366] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d27aee13-74ee-4000-99dd-665faae007f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.070349] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 885.070349] env[68233]: value = "task-2782496" [ 885.070349] env[68233]: _type = "Task" [ 885.070349] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.080241] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782496, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.310659] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 885.385799] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782494, 'name': PowerOffVM_Task, 'duration_secs': 0.226956} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.389118] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.389493] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.390110] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1fb1ecf0-69d9-4ebc-aa56-2674ceb072ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.460907] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.461185] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.461385] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleting the datastore file [datastore1] 22c06baf-6316-4531-8037-b8b77c401596 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.461722] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-90e1e07a-8256-470b-88bf-c7bc00d607d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.476131] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting 
for the task: (returnval){ [ 885.476131] env[68233]: value = "task-2782498" [ 885.476131] env[68233]: _type = "Task" [ 885.476131] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.484200] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782495, 'name': CreateVM_Task, 'duration_secs': 0.406668} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.490798] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.492039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.492342] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.492587] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 885.496770] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f344c81d-04e3-474f-84ef-b1dc05d914df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.498982] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782498, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.504619] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209e472-ab56-ca4e-384d-6dbe66eab391, 'name': SearchDatastore_Task, 'duration_secs': 0.01859} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.508279] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.508777] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. {{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 885.509365] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2fa9bb75-470c-4527-9174-5ed96acf6704 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.519308] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 885.519308] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241ac8d-8b71-71e9-8935-477a9ed38ab0" [ 885.519308] env[68233]: _type = "Task" [ 885.519308] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.534217] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 885.534217] env[68233]: value = "task-2782499" [ 885.534217] env[68233]: _type = "Task" [ 885.534217] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.544764] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241ac8d-8b71-71e9-8935-477a9ed38ab0, 'name': SearchDatastore_Task, 'duration_secs': 0.013829} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.547458] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.547748] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.547993] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.548153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.548331] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.552305] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62501475-481b-44a0-bff8-2a642882f6b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.554659] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782499, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.562846] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.562846] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.563683] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6004ccac-4bd8-494c-a6fe-53c05d785f82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.568943] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 885.568943] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52043661-5e6e-dce7-9e28-6fdc853e737d" [ 885.568943] env[68233]: _type = "Task" [ 885.568943] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.584065] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52043661-5e6e-dce7-9e28-6fdc853e737d, 'name': SearchDatastore_Task, 'duration_secs': 0.012136} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.592607] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782496, 'name': ReconfigVM_Task, 'duration_secs': 0.323842} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.595235] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48e85bc4-f0c8-4d54-b32b-e88f616403fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.597818] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Reconfigured VM instance instance-00000046 to attach disk [datastore1] d0d6eed0-db5b-4371-8f03-b3415fd833f0/d0d6eed0-db5b-4371-8f03-b3415fd833f0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 885.598671] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81f6c4b0-bee6-480f-8930-cbcff07d3b98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.603161] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 885.603161] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52749505-c2cd-294a-0f10-2ba6be100e82" [ 885.603161] env[68233]: _type = "Task" [ 885.603161] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.609705] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 885.609705] env[68233]: value = "task-2782500" [ 885.609705] env[68233]: _type = "Task" [ 885.609705] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.618993] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52749505-c2cd-294a-0f10-2ba6be100e82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.623622] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782500, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.714564] env[68233]: DEBUG nova.network.neutron [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updated VIF entry in instance network info cache for port ef237162-2628-4a17-9afd-7a418911f222. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 885.714939] env[68233]: DEBUG nova.network.neutron [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updating instance_info_cache with network_info: [{"id": "ef237162-2628-4a17-9afd-7a418911f222", "address": "fa:16:3e:33:8a:2c", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef237162-26", "ovs_interfaceid": "ef237162-2628-4a17-9afd-7a418911f222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.904935] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687e0411-785b-4cb0-9369-c551ed429a8d {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.914587] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae51ab02-8511-4dc6-92f0-2de93cc60d78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.951595] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a78175-a9cf-4fcb-8bac-9a89aff8f0f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.962645] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099568c3-5f99-4f6d-a6d0-64dd74e91f12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.969865] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Successfully created port: 8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 885.981235] env[68233]: DEBUG nova.compute.provider_tree [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.983475] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Successfully updated port: f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 885.995779] env[68233]: DEBUG oslo_vmware.api [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Task: {'id': task-2782498, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.204072} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.995779] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.995779] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.995779] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.995779] env[68233]: INFO nova.compute.manager [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Took 1.14 seconds to destroy the instance on the hypervisor. [ 885.995779] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.995779] env[68233]: DEBUG nova.compute.manager [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.995779] env[68233]: DEBUG nova.network.neutron [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.049393] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782499, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.116867] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52749505-c2cd-294a-0f10-2ba6be100e82, 'name': SearchDatastore_Task, 'duration_secs': 0.016832} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.120476] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.120786] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.121322] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97a6bc04-6a5e-4f96-97fa-8700d1e32409 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.128420] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782500, 'name': Rename_Task, 'duration_secs': 0.170962} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.129809] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 886.130164] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 886.130164] env[68233]: value = "task-2782501" [ 886.130164] env[68233]: _type = "Task" [ 886.130164] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.130426] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbff2069-1c0a-44b8-8df4-f77b7e6b6930 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.141589] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782501, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.143199] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 886.143199] env[68233]: value = "task-2782502" [ 886.143199] env[68233]: _type = "Task" [ 886.143199] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.156030] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.168329] env[68233]: DEBUG nova.network.neutron [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.217694] env[68233]: DEBUG oslo_concurrency.lockutils [req-331d8dea-cc38-48c9-891c-d54fd6e5d6e4 req-fd36e56f-e34b-41e6-8488-4c88c93746a1 service nova] Releasing lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.326902] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 886.354866] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 886.355136] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 886.355301] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 886.355486] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 886.355635] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 886.355784] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 886.355998] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 886.356174] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 886.356463] env[68233]: DEBUG nova.virt.hardware [None 
req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 886.356636] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 886.356810] env[68233]: DEBUG nova.virt.hardware [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 886.357709] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab40aa9-8b9d-45d8-923c-49b7fe91630f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.366396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd69f257-6341-41e9-b1ad-1c6153363134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.489617] env[68233]: DEBUG nova.scheduler.client.report [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 886.497027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.497027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 886.497027] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 886.551981] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 
tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590165} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.552575] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. [ 886.556359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4613eb-40b9-47ec-9999-a3f8e5d3390e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.595620] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.595620] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7848e200-75f8-4912-803a-b60fe13f5f2f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.610649] env[68233]: DEBUG nova.compute.manager [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Received event network-vif-deleted-1c30459d-e88b-42bd-8073-04aa89cecbc3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 886.610890] env[68233]: DEBUG nova.compute.manager [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Received event network-vif-plugged-f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 886.611343] env[68233]: DEBUG oslo_concurrency.lockutils [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] Acquiring lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.611459] env[68233]: DEBUG oslo_concurrency.lockutils [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.611603] env[68233]: DEBUG oslo_concurrency.lockutils [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.611831] env[68233]: DEBUG nova.compute.manager [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] No waiting events found dispatching network-vif-plugged-f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 886.612705] env[68233]: WARNING nova.compute.manager [req-70c94d72-7e62-48e6-81e8-02055fe92d79 req-11c89a0d-4938-4ed0-b3e7-ad56c2175663 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Received unexpected event network-vif-plugged-f3dc9bf3-0d50-44df-89a6-52cfc6899cad for instance with vm_state building and task_state spawning. [ 886.619429] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 886.619429] env[68233]: value = "task-2782503" [ 886.619429] env[68233]: _type = "Task" [ 886.619429] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.629084] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782503, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.641566] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782501, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.653708] env[68233]: DEBUG oslo_vmware.api [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782502, 'name': PowerOnVM_Task, 'duration_secs': 0.506697} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.654044] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 886.654241] env[68233]: INFO nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Took 8.76 seconds to spawn the instance on the hypervisor. 
[ 886.655230] env[68233]: DEBUG nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.656527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b2fcad-cd6c-417e-91b4-38bc26e71420 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.671399] env[68233]: INFO nova.compute.manager [-] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Took 1.92 seconds to deallocate network for instance. [ 886.735180] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.735431] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.996615] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.996932] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 887.003121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.809s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.003121] env[68233]: DEBUG nova.objects.instance [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lazy-loading 'resources' on Instance uuid f7a1bfc5-7141-4764-b3fe-08d06020209a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 887.037752] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 887.130510] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782503, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.142316] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782501, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540956} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.142316] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.142316] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.143427] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a07f153d-ef2d-4cd2-9ee5-8678a970cd15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.150108] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 887.150108] env[68233]: value = "task-2782504" [ 887.150108] env[68233]: _type = "Task" [ 887.150108] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.159650] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.181456] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.182151] env[68233]: INFO nova.compute.manager [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Took 49.30 seconds to build instance. [ 887.184929] env[68233]: DEBUG nova.network.neutron [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.237742] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.502628] env[68233]: DEBUG nova.compute.utils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 887.504337] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 887.504337] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 887.529361] env[68233]: DEBUG nova.network.neutron [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updating instance_info_cache with network_info: [{"id": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "address": "fa:16:3e:39:39:47", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dc9bf3-0d", "ovs_interfaceid": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.573623] env[68233]: DEBUG nova.policy [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6f4dee9aa3640bd88dbc66a497ee3d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '978c6dbf1c10443da3253a58f1e5bdea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 887.631886] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a 
tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782503, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.659891] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130471} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.662792] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.664883] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13035a6c-0eb3-4826-a87f-881dbaa10fb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.689841] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.693479] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a07d44c-935d-45cb-9339-ca62a1314afb tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.575s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.693740] env[68233]: INFO nova.compute.manager [-] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Took 1.70 seconds to deallocate network for instance. [ 887.694443] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c52cecfb-83b1-4231-bc38-d39eb7929303 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.720851] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 887.720851] env[68233]: value = "task-2782505" [ 887.720851] env[68233]: _type = "Task" [ 887.720851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.733644] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782505, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.773290] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.893194] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Successfully created port: d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.955342] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 887.956281] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76176f4-a68f-49fb-9759-71f756488127 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.965811] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 887.965811] env[68233]: ERROR oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk due to incomplete transfer. [ 887.965811] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-23ba5d12-54f1-4dec-ba91-0403173edbd6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.974722] env[68233]: DEBUG oslo_vmware.rw_handles [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52187b95-09bf-483a-2793-b539d04a484d/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 887.974958] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Uploaded image f91c1add-305a-4e51-ac18-5438e9f1375a to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 887.977516] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 887.977795] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fd7ca1cc-8f1f-4752-825d-0a9e0807d90e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.983705] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 887.983705] env[68233]: value = "task-2782506" [ 887.983705] env[68233]: _type = "Task" [ 887.983705] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.991958] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782506, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.011468] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 888.033930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 888.034261] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Instance network_info: |[{"id": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "address": "fa:16:3e:39:39:47", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dc9bf3-0d", "ovs_interfaceid": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 888.034906] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:39:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3dc9bf3-0d50-44df-89a6-52cfc6899cad', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 888.044163] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 888.044740] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 888.044980] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-769ad151-fb3f-42f9-8e24-616434ff56fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.069438] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 888.069438] env[68233]: value = "task-2782507" [ 888.069438] env[68233]: _type = "Task" [ 888.069438] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.077940] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782507, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.136139] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782503, 'name': ReconfigVM_Task, 'duration_secs': 1.509876} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.136560] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.139724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380383b1-dce1-44af-a703-d0f762cd95cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.174411] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4581778a-13db-4c1b-bf69-8c2a42ccbca2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.190789] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 888.190789] env[68233]: value = "task-2782508" [ 888.190789] env[68233]: _type = "Task" [ 888.190789] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.204333] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782508, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.217750] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.240249] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782505, 'name': ReconfigVM_Task, 'duration_secs': 0.312908} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.240367] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.243676] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-703e2170-b85c-48d3-9582-6b5549d520f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.252399] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 888.252399] env[68233]: value = "task-2782509" [ 888.252399] env[68233]: _type = "Task" [ 888.252399] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.261499] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782509, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.289452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b40194-7c66-4408-b6f7-702cfbc9ce7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.297563] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d5f895b-1bd5-4133-b61e-43093ba74dd5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.333091] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fd6c16-5518-47e5-bf0b-a51059496bc9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.342264] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8c48b7-7e15-4add-926a-f52ee5c2b284 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.357095] env[68233]: DEBUG nova.compute.provider_tree [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 888.493544] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782506, 'name': Destroy_Task, 'duration_secs': 0.472307} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.493823] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Destroyed the VM [ 888.494075] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 888.494338] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7ab9b313-3f64-40b7-998b-caa61e6c81fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.502261] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 888.502261] env[68233]: value = "task-2782510" [ 888.502261] env[68233]: _type = "Task" [ 888.502261] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.513385] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.579599] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782507, 'name': CreateVM_Task, 'duration_secs': 0.340641} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.579715] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 888.581634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.581634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.581634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 888.582096] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Successfully updated port: 8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.583574] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98a0c7c0-43d1-4c0c-bb44-361939402820 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.589676] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 888.589676] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200ec49-71ec-aab7-72a5-4f54026f7234" [ 888.589676] env[68233]: _type = "Task" [ 888.589676] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.600258] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200ec49-71ec-aab7-72a5-4f54026f7234, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.617124] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Received event network-changed-f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 888.617322] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Refreshing instance network info cache due to event network-changed-f3dc9bf3-0d50-44df-89a6-52cfc6899cad. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 888.617565] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Acquiring lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 888.617678] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Acquired lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 888.617835] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Refreshing network info cache for port f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 888.703230] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782508, 'name': ReconfigVM_Task, 'duration_secs': 0.210596} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.703755] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.703979] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31ddbbc5-7bcd-4ab9-a9fe-7b1bd20d9090 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.709604] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 888.709604] env[68233]: value = "task-2782511" [ 888.709604] env[68233]: _type = "Task" [ 888.709604] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.717275] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782511, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.764150] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782509, 'name': Rename_Task, 'duration_secs': 0.200523} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.764267] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.764503] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18f93fe7-d928-417e-a2b9-9e0c562a5cea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.771271] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 888.771271] env[68233]: value = "task-2782512" [ 888.771271] env[68233]: _type = "Task" [ 888.771271] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.779643] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782512, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.863034] env[68233]: DEBUG nova.scheduler.client.report [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 889.019251] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.020608] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 889.054198] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 889.054499] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 889.054698] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 889.054943] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor pref 0:0:0 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 889.055157] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 889.055349] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 889.055607] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 889.055792] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 889.056226] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 889.056226] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 889.056363] env[68233]: DEBUG nova.virt.hardware [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 889.057752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606e2999-b51c-44af-acf0-2faf9487e064 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.065982] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a37a0b-e1cf-42dd-a33c-5af0c8442b59 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.086614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.087306] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c 
tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.087546] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.101353] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200ec49-71ec-aab7-72a5-4f54026f7234, 'name': SearchDatastore_Task, 'duration_secs': 0.009403} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.101700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.101965] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 889.102277] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.102542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.102768] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 889.103039] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb538329-19f6-434e-a4bc-9363f26ed669 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.111919] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 
tempest-ServerRescueTestJSON-1299350510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 889.112164] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 889.112965] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-350706a6-c9a5-4165-9757-1dd5bc6babfb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.119108] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 889.119108] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52194f1e-6565-fd68-f213-7038f20cdeef" [ 889.119108] env[68233]: _type = "Task" [ 889.119108] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.133219] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52194f1e-6565-fd68-f213-7038f20cdeef, 'name': SearchDatastore_Task, 'duration_secs': 0.008816} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.134264] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d090000-c941-4c8a-b6dc-3a11475aaa03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.141644] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 889.141644] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243c0b2-6e86-83b1-98ac-ce909d2a42c5" [ 889.141644] env[68233]: _type = "Task" [ 889.141644] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.150858] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243c0b2-6e86-83b1-98ac-ce909d2a42c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.222884] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782511, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.281179] env[68233]: DEBUG oslo_vmware.api [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782512, 'name': PowerOnVM_Task, 'duration_secs': 0.503318} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.281518] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.281730] env[68233]: INFO nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Took 8.45 seconds to spawn the instance on the hypervisor. [ 889.281921] env[68233]: DEBUG nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.285125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237d0d27-9a4d-4ebf-86ca-43c290ea5f1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.368889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.366s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.372430] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.340s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.372633] env[68233]: DEBUG nova.objects.instance [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 889.393980] env[68233]: DEBUG nova.compute.manager [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.394416] env[68233]: DEBUG oslo_concurrency.lockutils [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da 
service nova] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.394537] env[68233]: DEBUG oslo_concurrency.lockutils [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da service nova] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.395184] env[68233]: DEBUG oslo_concurrency.lockutils [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da service nova] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.395184] env[68233]: DEBUG nova.compute.manager [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] No waiting events found dispatching network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.395184] env[68233]: WARNING nova.compute.manager [req-61dc6e1b-9b80-40e7-a60a-3d15f77e5f75 req-2d6d0c3f-f319-46a2-9d3b-e21e1ea9e6da service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received unexpected event network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b for instance with vm_state building and task_state spawning. [ 889.399544] env[68233]: INFO nova.scheduler.client.report [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Deleted allocations for instance f7a1bfc5-7141-4764-b3fe-08d06020209a [ 889.447880] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updated VIF entry in instance network info cache for port f3dc9bf3-0d50-44df-89a6-52cfc6899cad. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 889.448427] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updating instance_info_cache with network_info: [{"id": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "address": "fa:16:3e:39:39:47", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dc9bf3-0d", "ovs_interfaceid": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.513963] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.527188] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Successfully updated port: d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.626055] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 889.655309] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5243c0b2-6e86-83b1-98ac-ce909d2a42c5, 'name': SearchDatastore_Task, 'duration_secs': 0.009073} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.655572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.656077] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/3cca16e1-3363-4026-9359-4ed2ba41e25d.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 889.656207] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-920ab6f6-5306-44eb-a88a-ed19efce3e74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.662468] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 889.662468] env[68233]: value = "task-2782513" [ 889.662468] env[68233]: _type = "Task" [ 889.662468] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.670307] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782513, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.720258] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782511, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.762800] env[68233]: DEBUG nova.network.neutron [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Updating instance_info_cache with network_info: [{"id": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "address": "fa:16:3e:c8:b9:af", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dc8585f-e1", "ovs_interfaceid": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.803667] env[68233]: INFO nova.compute.manager [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Took 48.18 seconds to build instance. 
[ 889.914959] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c368da2-6270-4a7c-9b18-571c8f249eba tempest-ServersAdminNegativeTestJSON-1184434562 tempest-ServersAdminNegativeTestJSON-1184434562-project-member] Lock "f7a1bfc5-7141-4764-b3fe-08d06020209a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.321s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.953014] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Releasing lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 889.953014] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Received event network-vif-deleted-85347f9f-ac3e-42ee-a5bd-bf41164a6cc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.953014] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Received event network-vif-plugged-8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.953014] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Acquiring lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.953014] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.953014] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.953014] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] No waiting events found dispatching network-vif-plugged-8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.953014] env[68233]: WARNING nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Received unexpected event network-vif-plugged-8dc8585f-e1a1-4d6c-8ce7-40de53add184 for instance with vm_state building and task_state spawning. 
[ 889.953014] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Received event network-changed-8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 889.953014] env[68233]: DEBUG nova.compute.manager [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Refreshing instance network info cache due to event network-changed-8dc8585f-e1a1-4d6c-8ce7-40de53add184. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 889.953014] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Acquiring lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.018474] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.034890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.034890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.034890] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.180259] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782513, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.220950] env[68233]: DEBUG oslo_vmware.api [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782511, 'name': PowerOnVM_Task, 'duration_secs': 1.160514} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.221501] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.224588] env[68233]: DEBUG nova.compute.manager [None req-b01e3684-4c5f-4a88-b6fb-56033c4f267a tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.225403] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1484f6-6243-4b64-a401-b3a655a78d73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.266607] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 890.267109] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Instance network_info: |[{"id": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "address": "fa:16:3e:c8:b9:af", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dc8585f-e1", "ovs_interfaceid": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 890.267656] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Acquired lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.267966] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Refreshing 
network info cache for port 8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.269919] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c8:b9:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8dc8585f-e1a1-4d6c-8ce7-40de53add184', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.283524] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating folder: Project (d32ae322ad5641b4bebd1aa390b5914f). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.288757] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe0516d4-2653-4841-bc73-96df0fdc2de0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.301954] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created folder: Project (d32ae322ad5641b4bebd1aa390b5914f) in parent group-v559223. [ 890.302300] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating folder: Instances. Parent ref: group-v559418. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.302642] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c314047b-647a-4922-95c6-2bf84d03f1a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.306474] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed6651d8-3583-4b69-9376-d2b1657cbda7 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.066s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.315407] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created folder: Instances in parent group-v559418. [ 890.315407] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 890.315407] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.315569] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e646f369-828e-4453-b2b3-3036b3d82b27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.337234] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.337234] env[68233]: value = "task-2782516" [ 890.337234] env[68233]: _type = "Task" [ 890.337234] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.347780] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782516, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.392027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d7d34411-0ba3-407d-afd1-0c22a8909a61 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.393966] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.181s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.395692] env[68233]: INFO nova.compute.claims [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.515431] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.592336] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Updated VIF entry in instance network info cache for port 8dc8585f-e1a1-4d6c-8ce7-40de53add184. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 890.592336] env[68233]: DEBUG nova.network.neutron [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Updating instance_info_cache with network_info: [{"id": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "address": "fa:16:3e:c8:b9:af", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8dc8585f-e1", "ovs_interfaceid": "8dc8585f-e1a1-4d6c-8ce7-40de53add184", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.606666] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.676257] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782513, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576987} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.676257] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/3cca16e1-3363-4026-9359-4ed2ba41e25d.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 890.676257] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 890.676257] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6c8fd679-38eb-4e35-ad75-a0be21167a9f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.692174] env[68233]: DEBUG nova.compute.manager [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Received event network-changed-4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 890.692174] env[68233]: DEBUG nova.compute.manager [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Refreshing instance network info cache due to event network-changed-4969d072-296f-454b-9621-58f95b90a8dd. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 890.692174] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Acquiring lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.692174] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Acquired lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.692174] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Refreshing network info cache for port 4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 890.694776] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 890.694776] env[68233]: value = "task-2782517" [ 890.694776] env[68233]: _type = "Task" [ 890.694776] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.712024] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782517, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.848107] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782516, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.869109] env[68233]: DEBUG nova.network.neutron [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.014876] env[68233]: DEBUG oslo_vmware.api [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782510, 'name': RemoveSnapshot_Task, 'duration_secs': 2.260275} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.015191] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 891.015462] env[68233]: INFO nova.compute.manager [None req-3f5e473c-d64b-45aa-a6c5-cf5c42aaf4ad tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Took 17.57 seconds to snapshot the instance on the hypervisor. 
[ 891.094594] env[68233]: DEBUG oslo_concurrency.lockutils [req-8bf621a7-c9ae-4f20-becc-dc13c8e29d4d req-4c6cce58-7571-4da9-8a2c-7299ee93e133 service nova] Releasing lock "refresh_cache-c5c8bf0c-eb58-41bc-a316-b4ac78187658" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.208195] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782517, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065876} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.208195] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 891.210104] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db7c9a8-3e92-42f5-b3a0-7da129aeed22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.235097] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/3cca16e1-3363-4026-9359-4ed2ba41e25d.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 891.235097] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3758668c-7a8f-4812-8ae6-67700569c528 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.262432] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 891.262432] env[68233]: value = "task-2782518" [ 891.262432] env[68233]: _type = "Task" [ 891.262432] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.271968] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782518, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.348769] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782516, 'name': CreateVM_Task, 'duration_secs': 0.762601} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.348934] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.349633] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.349791] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.350134] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 891.350395] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49f54672-7084-4b51-952b-67dc89f6dd84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.355107] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 891.355107] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245d4cf-7b76-d178-151f-4001e32a7849" [ 891.355107] env[68233]: _type = "Task" [ 891.355107] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.363091] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245d4cf-7b76-d178-151f-4001e32a7849, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.371770] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.372115] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance network_info: |[{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.372627] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:5f:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9478083-21a3-4b61-ab65-e1281b8bac7b', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.381427] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating folder: Project (978c6dbf1c10443da3253a58f1e5bdea). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.381720] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f082ae69-67a7-4721-a393-6898048ed910 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.399312] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created folder: Project (978c6dbf1c10443da3253a58f1e5bdea) in parent group-v559223. [ 891.399559] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating folder: Instances. Parent ref: group-v559421. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.399890] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b3f575ef-3722-404c-9ed7-d2da38c6a6e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.409737] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created folder: Instances in parent group-v559421. [ 891.409737] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.409737] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.409910] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fddc8d81-ff13-42ac-8786-c1acf975c8d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.434415] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.434415] env[68233]: value = "task-2782521" [ 891.434415] env[68233]: _type = "Task" [ 891.434415] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.440331] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782521, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.616520] env[68233]: DEBUG nova.compute.manager [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 891.616520] env[68233]: DEBUG nova.compute.manager [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing instance network info cache due to event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 891.616661] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.616814] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.616973] env[68233]: DEBUG nova.network.neutron [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.655141] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updated VIF entry in instance network info cache for port 4969d072-296f-454b-9621-58f95b90a8dd. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 891.655141] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updating instance_info_cache with network_info: [{"id": "4969d072-296f-454b-9621-58f95b90a8dd", "address": "fa:16:3e:84:f6:ff", "network": {"id": "948796e1-4455-4621-a9ee-11fdd0b747af", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-380200332-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1f138d0135943fb87024b943f2dba25", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4969d072-29", "ovs_interfaceid": "4969d072-296f-454b-9621-58f95b90a8dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.773359] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782518, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.867257] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5245d4cf-7b76-d178-151f-4001e32a7849, 'name': SearchDatastore_Task, 'duration_secs': 0.024116} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.869980] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.872020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.872020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.872020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.872020] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.872020] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06dc8023-83a9-4794-94d2-f47c1623aad8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.882498] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.882715] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.883498] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33335115-ec8b-438b-86e2-9056dfbd6843 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.889198] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 891.889198] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f25f9b-e60c-58ad-e05f-47ddb9d6e25c" [ 891.889198] env[68233]: _type = "Task" [ 891.889198] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.902018] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f25f9b-e60c-58ad-e05f-47ddb9d6e25c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.926667] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460f431c-2788-4f59-8acc-c61e2f9b7986 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.941900] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce9995a-326d-4b60-8676-d607031ba708 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.950818] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782521, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.977519] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc72e41-172b-4303-ae49-a907839a2f57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.986810] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9118cc6b-7ff0-41ab-8afe-5e5770ca8b45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.005892] env[68233]: DEBUG nova.compute.provider_tree [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.160623] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Releasing lock "refresh_cache-d0d6eed0-db5b-4371-8f03-b3415fd833f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.160623] env[68233]: DEBUG nova.compute.manager [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Received event network-changed-ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 892.160623] env[68233]: DEBUG nova.compute.manager [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Refreshing instance network info cache due to event network-changed-ef237162-2628-4a17-9afd-7a418911f222. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 892.160623] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Acquiring lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.160623] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Acquired lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.160623] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Refreshing network info cache for port ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.277424] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782518, 'name': ReconfigVM_Task, 'duration_secs': 0.815306} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.277700] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/3cca16e1-3363-4026-9359-4ed2ba41e25d.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 892.278720] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2df59cd-c647-4d6a-a34a-63b6f1b93be4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.285268] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 892.285268] env[68233]: value = "task-2782522" [ 892.285268] env[68233]: _type = "Task" [ 892.285268] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.293211] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782522, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.398721] env[68233]: DEBUG nova.network.neutron [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updated VIF entry in instance network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.401497] env[68233]: DEBUG nova.network.neutron [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.411852] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f25f9b-e60c-58ad-e05f-47ddb9d6e25c, 'name': SearchDatastore_Task, 'duration_secs': 0.011044} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.416538] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6fd159e-fc61-47df-abe7-b8478147755c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.422582] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 892.422582] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b43320-492a-7bba-e7a4-b98fadfcbbd6" [ 892.422582] env[68233]: _type = "Task" [ 892.422582] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.431397] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b43320-492a-7bba-e7a4-b98fadfcbbd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.444873] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782521, 'name': CreateVM_Task, 'duration_secs': 0.645188} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.445050] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.446059] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.446059] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.446207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.446423] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b2b00e6-191d-436f-bb90-45955f2d48c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.451426] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 892.451426] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523429d3-e7a1-9852-41be-68be97a34779" [ 892.451426] env[68233]: _type = "Task" [ 892.451426] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.462109] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523429d3-e7a1-9852-41be-68be97a34779, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.509770] env[68233]: DEBUG nova.scheduler.client.report [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.796207] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782522, 'name': Rename_Task, 'duration_secs': 0.225543} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.796726] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 892.797357] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3967a644-400f-4ef3-aeeb-daeac7c88b36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.804159] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 892.804159] env[68233]: value = "task-2782523" [ 892.804159] env[68233]: _type = "Task" [ 892.804159] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.816947] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782523, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.908431] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c511b3f-8d09-4ac2-a8a2-c99b51c373e7 req-a8bfebdb-45d3-4baa-a559-3d4ef2091da1 service nova] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.941891] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b43320-492a-7bba-e7a4-b98fadfcbbd6, 'name': SearchDatastore_Task, 'duration_secs': 0.015349} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.943299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.943671] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c5c8bf0c-eb58-41bc-a316-b4ac78187658/c5c8bf0c-eb58-41bc-a316-b4ac78187658.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.944384] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17c035a4-d3ea-4bf5-8783-1126e18b1453 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.958618] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 892.958618] env[68233]: value = "task-2782524" [ 892.958618] env[68233]: _type = "Task" [ 892.958618] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.967922] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523429d3-e7a1-9852-41be-68be97a34779, 'name': SearchDatastore_Task, 'duration_secs': 0.019741} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.968601] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.968839] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.969103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.969268] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.969453] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.969706] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89cf6dd3-e1f6-4a90-8c86-0158b73618d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.974327] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782524, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.980480] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.980662] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.981450] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-057d4bb1-a4dd-454a-b7f3-3fb35c005654 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.986402] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 892.986402] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fff360-1578-ef36-f1a5-946b703e3be0" [ 892.986402] env[68233]: _type = "Task" [ 892.986402] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.994090] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fff360-1578-ef36-f1a5-946b703e3be0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.998516] env[68233]: DEBUG nova.compute.manager [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 892.998702] env[68233]: DEBUG nova.compute.manager [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing instance network info cache due to event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 892.998910] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.999188] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.999373] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 893.019029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.019597] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.022211] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.937s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.023817] env[68233]: INFO nova.compute.claims [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 893.111919] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updated VIF entry in instance network info cache for port ef237162-2628-4a17-9afd-7a418911f222. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.112320] env[68233]: DEBUG nova.network.neutron [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updating instance_info_cache with network_info: [{"id": "ef237162-2628-4a17-9afd-7a418911f222", "address": "fa:16:3e:33:8a:2c", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef237162-26", "ovs_interfaceid": "ef237162-2628-4a17-9afd-7a418911f222", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.319427] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782523, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.419098] env[68233]: DEBUG nova.compute.manager [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.420594] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23369701-519c-480e-88bb-93fd6c319e49 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.470154] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782524, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.498284] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fff360-1578-ef36-f1a5-946b703e3be0, 'name': SearchDatastore_Task, 'duration_secs': 0.012564} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.499535] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67145420-1453-41ac-9f30-1b0fee008ee2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.509629] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 893.509629] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a163-ec41-bfcf-70ca-5541fb4168c0" [ 893.509629] env[68233]: _type = "Task" [ 893.509629] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.519059] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a163-ec41-bfcf-70ca-5541fb4168c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.538984] env[68233]: DEBUG nova.compute.utils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.538984] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 893.538984] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.575510] env[68233]: DEBUG nova.policy [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77a0fdbc15849c6b1cf9518052a745d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ebf78d36f9e42eca135e60dc7dcc8c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 893.615773] env[68233]: DEBUG oslo_concurrency.lockutils [req-8239d50f-0916-4583-b220-191e7ae9c71f req-d35d3eb5-c92d-472e-87e8-80ba0e85d4d3 service nova] Releasing lock "refresh_cache-4677d047-f8dc-4501-be9b-14e6a2222f46" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.825739] env[68233]: DEBUG oslo_vmware.api [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782523, 'name': PowerOnVM_Task, 'duration_secs': 0.754978} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.825739] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 893.826252] env[68233]: INFO nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Took 10.21 seconds to spawn the instance on the hypervisor. 
[ 893.826252] env[68233]: DEBUG nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 893.827173] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bad672d-6ccc-4dd7-9ea0-4cd9ed0b1468 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.884869] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Successfully created port: 6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 893.935850] env[68233]: INFO nova.compute.manager [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] instance snapshotting [ 893.936724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f539e2-1563-4dde-9813-931137d85701 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.958749] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc11acb-2bb4-4d86-94e6-503f0c509450 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.971437] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782524, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595236} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.973420] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c5c8bf0c-eb58-41bc-a316-b4ac78187658/c5c8bf0c-eb58-41bc-a316-b4ac78187658.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.973598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.979987] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6304594f-7412-4b0c-a8f1-d2e5fd8dc114 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.984750] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updated VIF entry in instance network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.984885] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.991137] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 893.991137] env[68233]: value = "task-2782525" [ 893.991137] env[68233]: _type = "Task" [ 893.991137] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.000894] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782525, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.020869] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e7a163-ec41-bfcf-70ca-5541fb4168c0, 'name': SearchDatastore_Task, 'duration_secs': 0.016629} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.021162] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.021464] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 894.021729] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6482d88e-9beb-4c1d-9139-e8429ae76a95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.031025] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 894.031025] env[68233]: value = "task-2782526" [ 894.031025] env[68233]: _type = "Task" [ 894.031025] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.041446] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782526, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.042651] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.350389] env[68233]: INFO nova.compute.manager [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Took 45.79 seconds to build instance. [ 894.482786] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 894.482786] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4224b7d1-1f3e-42a8-bd51-686ff7fdfe7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.489760] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.490108] env[68233]: DEBUG nova.compute.manager [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 894.490238] env[68233]: DEBUG nova.compute.manager [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing instance network info cache due to event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 894.490462] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.490605] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.490769] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 894.503756] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 894.503756] env[68233]: value = "task-2782527" [ 894.503756] env[68233]: _type = "Task" [ 894.503756] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.523309] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782525, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.087932} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.526958] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.527921] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782527, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.529338] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d80324-73ac-40ed-9262-13afd63b4f6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.568194] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] c5c8bf0c-eb58-41bc-a316-b4ac78187658/c5c8bf0c-eb58-41bc-a316-b4ac78187658.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.576383] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb37bc9d-6285-4914-bc30-616793e0b782 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.592504] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782526, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.599130] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 894.599130] env[68233]: value = "task-2782528" [ 894.599130] env[68233]: _type = "Task" [ 894.599130] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.609627] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782528, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.713169] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6966b2da-61ea-4cd3-8aaa-565ea5d4e401 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.722917] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f497a2f3-1f4c-4ae7-88c2-2d9dd6ca4e96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.754811] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e664e8dd-7259-41be-b66f-0a28d21d69c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.763312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68b58080-10c1-4f76-8e59-9e62156d7975 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.779061] env[68233]: DEBUG nova.compute.provider_tree [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.854740] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d792a423-3dac-4772-b253-d9b2de898870 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.277s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.025972] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782527, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.045293] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.738322} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.045293] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 895.045293] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 895.045293] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cd5c6bc-2128-4f22-bd75-67dd937f4b00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.051502] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 895.051502] env[68233]: value = "task-2782529" [ 895.051502] env[68233]: _type = "Task" [ 895.051502] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.060276] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.071068] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.100773] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 895.101467] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 895.101467] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 895.101467] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 895.101661] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 895.101701] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 895.101911] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 895.102076] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 895.102259] env[68233]: DEBUG nova.virt.hardware [None 
req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 895.102442] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 895.102750] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 895.103724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2362070-f901-4d6a-845b-6a2da3c013ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.120281] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3839fd-8014-40ad-baa1-66f7e496d5fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.127471] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782528, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.184216] env[68233]: DEBUG nova.compute.manager [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 895.184442] env[68233]: DEBUG nova.compute.manager [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing instance network info cache due to event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 895.184635] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.282505] env[68233]: DEBUG nova.scheduler.client.report [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.480195] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updated VIF entry in instance network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 895.480581] env[68233]: DEBUG nova.network.neutron [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.525173] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782527, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.563295] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107651} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.563587] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.564398] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7551006-868d-4095-a31f-f4d31d9eac52 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.595768] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.596130] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4054470d-f553-4549-9efb-7b11f41edc5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.615301] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Successfully updated port: 6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 895.628012] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782528, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.630588] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 895.630588] env[68233]: value = "task-2782530" [ 895.630588] env[68233]: _type = "Task" [ 895.630588] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.640915] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782530, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.763554] env[68233]: INFO nova.compute.manager [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Rescuing [ 895.764767] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.764767] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.764767] env[68233]: DEBUG nova.network.neutron [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 895.790593] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.791142] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 895.796605] env[68233]: DEBUG nova.compute.manager [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Received event network-vif-plugged-6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 895.796865] env[68233]: DEBUG oslo_concurrency.lockutils [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] Acquiring lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.797245] env[68233]: DEBUG oslo_concurrency.lockutils [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.797418] env[68233]: DEBUG oslo_concurrency.lockutils [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.797774] env[68233]: DEBUG nova.compute.manager [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] No waiting events found dispatching network-vif-plugged-6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 895.797774] env[68233]: WARNING nova.compute.manager [req-e9c8e610-1c06-440b-ac59-337f9657cc50 req-f33a772a-864f-4f44-b2d9-ef9c3d338c0d service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Received unexpected event network-vif-plugged-6e108ab9-42e1-4bcb-99cf-f0628d6e4692 for instance with vm_state building and task_state spawning. 
[ 895.799478] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.869s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.799723] env[68233]: DEBUG nova.objects.instance [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 895.983301] env[68233]: DEBUG oslo_concurrency.lockutils [req-52de0f47-9424-4174-8c0d-63339a88684b req-e6378f3e-080a-46fa-b1fc-02cb04ac52bf service nova] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.983978] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.984037] env[68233]: DEBUG nova.network.neutron [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 896.024623] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782527, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.125665] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.125665] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.125897] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 896.138909] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782528, 'name': ReconfigVM_Task, 'duration_secs': 1.267887} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.141447] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Reconfigured VM instance instance-00000049 to attach disk [datastore2] c5c8bf0c-eb58-41bc-a316-b4ac78187658/c5c8bf0c-eb58-41bc-a316-b4ac78187658.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.142408] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b00dd889-9544-412e-8d82-010b6ac94b42 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.154161] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782530, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.157304] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 896.157304] env[68233]: value = "task-2782531" [ 896.157304] env[68233]: _type = "Task" [ 896.157304] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.175151] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782531, 'name': Rename_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.303149] env[68233]: DEBUG nova.compute.utils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 896.303879] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 896.304206] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 896.350627] env[68233]: DEBUG nova.policy [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77a0fdbc15849c6b1cf9518052a745d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2ebf78d36f9e42eca135e60dc7dcc8c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 896.502734] env[68233]: DEBUG nova.network.neutron [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updating instance_info_cache with network_info: [{"id": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "address": "fa:16:3e:39:39:47", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dc9bf3-0d", "ovs_interfaceid": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.528324] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab 
tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782527, 'name': CreateSnapshot_Task, 'duration_secs': 1.748114} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.528624] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 896.529409] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2853b480-7f94-4ebb-a02c-c302b058d947 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.646217] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782530, 'name': ReconfigVM_Task, 'duration_secs': 0.569536} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.646507] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 896.647147] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f057f1c4-b575-4de5-8819-fcd61af8d45d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.653970] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 896.653970] env[68233]: value = "task-2782532" [ 896.653970] env[68233]: _type = "Task" [ 896.653970] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.665535] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782532, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.670527] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782531, 'name': Rename_Task, 'duration_secs': 0.161657} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.670841] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.671158] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18490f3c-4c05-470d-b2a7-5d971798b8fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.677157] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 896.677157] env[68233]: value = "task-2782533" [ 896.677157] env[68233]: _type = "Task" [ 896.677157] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.691289] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.697678] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 896.810706] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 896.814812] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1d287ed8-bad1-46b7-8503-3973dcdf7195 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.819714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.968s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.819975] env[68233]: DEBUG nova.objects.instance [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lazy-loading 'resources' on Instance uuid 2c219b8c-813d-4155-af3b-327a7ebd75fc {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 896.913825] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Updating instance_info_cache with network_info: [{"id": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "address": "fa:16:3e:07:71:7d", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e108ab9-42", "ovs_interfaceid": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.007664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.049389] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Creating linked-clone VM from snapshot 
{{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 897.050450] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a41f36e2-5b45-4c16-905f-0b8088c646e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.060460] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 897.060460] env[68233]: value = "task-2782534" [ 897.060460] env[68233]: _type = "Task" [ 897.060460] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.071566] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782534, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.173027] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782532, 'name': Rename_Task, 'duration_secs': 0.450381} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.173393] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 897.173974] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1948806d-94e9-4829-a957-7fe9689315b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.183551] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 897.183551] env[68233]: value = "task-2782535" [ 897.183551] env[68233]: _type = "Task" [ 897.183551] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.194984] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782533, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.203659] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782535, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.352738] env[68233]: DEBUG nova.network.neutron [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updated VIF entry in instance network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 897.352738] env[68233]: DEBUG nova.network.neutron [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 897.407572] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Successfully created port: 80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 897.419433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.419894] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Instance network_info: |[{"id": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "address": "fa:16:3e:07:71:7d", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e108ab9-42", "ovs_interfaceid": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 897.420337] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:71:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e108ab9-42e1-4bcb-99cf-f0628d6e4692', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.428557] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.428810] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.429259] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-330694a5-8f8e-4be3-b2cc-34f9da8d00b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.459630] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.459630] env[68233]: value = "task-2782536" [ 897.459630] env[68233]: _type = "Task" [ 897.459630] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.475252] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782536, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.574306] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782534, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.620543] env[68233]: DEBUG nova.compute.manager [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 897.620793] env[68233]: DEBUG nova.compute.manager [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing instance network info cache due to event network-changed-5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 897.620993] env[68233]: DEBUG oslo_concurrency.lockutils [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] Acquiring lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.688155] env[68233]: DEBUG oslo_vmware.api [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782533, 'name': PowerOnVM_Task, 'duration_secs': 0.792293} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.694889] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.695322] env[68233]: INFO nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Took 11.37 seconds to spawn the instance on the hypervisor. [ 897.695388] env[68233]: DEBUG nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 897.697337] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aaabf5-cc56-474d-84a5-5704adca1845 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.705650] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782535, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.829741] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 897.854310] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6271d3-6409-470d-baaf-73f6be4a717d req-ef695974-6384-4178-9273-a815903817b4 service nova] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.854707] env[68233]: DEBUG oslo_concurrency.lockutils [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] Acquired lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.854924] env[68233]: DEBUG nova.network.neutron [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Refreshing network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 897.858094] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.858229] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.858357] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.858532] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.858671] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.858818] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 
tempest-MultipleCreateTestJSON-1316369434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.859035] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.859231] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.859427] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.859612] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.859788] env[68233]: DEBUG nova.virt.hardware [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.860706] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9ac03c-8095-47e4-99e7-1379a6662e3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.872017] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae9c647-c203-405e-9693-f45e21fbc6f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.969972] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782536, 'name': CreateVM_Task, 'duration_secs': 0.359441} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.970187] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 897.970831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.970994] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.971354] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 897.971610] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0da92e46-9a6a-4b83-85a8-3b6e82d64420 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.976137] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 897.976137] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523f2099-8f6d-e530-f082-ff50d0823a6e" [ 897.976137] env[68233]: _type = "Task" [ 897.976137] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.984443] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523f2099-8f6d-e530-f082-ff50d0823a6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.986044] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80856a94-4670-43b3-b1e0-f22840f31ddc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.992569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca7a566-b2e7-4054-a214-9f99de44365f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.027359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d333c06b-b718-4192-9185-dcd65ef73b3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.035552] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f455c7b-922f-4bb4-a994-9edec909856b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.052083] env[68233]: DEBUG nova.compute.provider_tree [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.072016] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782534, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.074374] env[68233]: DEBUG nova.compute.manager [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Received event network-changed-6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 898.074516] env[68233]: DEBUG nova.compute.manager [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Refreshing instance network info cache due to event network-changed-6e108ab9-42e1-4bcb-99cf-f0628d6e4692. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 898.074763] env[68233]: DEBUG oslo_concurrency.lockutils [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] Acquiring lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.074915] env[68233]: DEBUG oslo_concurrency.lockutils [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] Acquired lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.075095] env[68233]: DEBUG nova.network.neutron [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Refreshing network info cache for port 6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 898.202241] env[68233]: DEBUG oslo_vmware.api [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782535, 'name': PowerOnVM_Task, 'duration_secs': 0.632141} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.202685] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 898.203014] env[68233]: INFO nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Took 9.18 seconds to spawn the instance on the hypervisor. [ 898.203326] env[68233]: DEBUG nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 898.204588] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8439ffb9-e8a5-4e3f-9fc5-6a6d2dd6a432 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.225519] env[68233]: INFO nova.compute.manager [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Took 48.55 seconds to build instance. [ 898.488746] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523f2099-8f6d-e530-f082-ff50d0823a6e, 'name': SearchDatastore_Task, 'duration_secs': 0.023115} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.489231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 898.490361] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 898.490361] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.490361] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.490581] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 898.490907] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c20cb76-04ab-4427-800d-cae959f90e74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.501034] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 898.501363] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 898.502476] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-787685f4-00bf-4961-96be-2d63b336418e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.509708] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 898.509708] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d5c504-ae79-fb2d-21cb-01730732a981" [ 898.509708] env[68233]: _type = "Task" [ 898.509708] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.524392] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d5c504-ae79-fb2d-21cb-01730732a981, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.559128] env[68233]: DEBUG nova.scheduler.client.report [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.565019] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 898.565019] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd4eb9d6-7c43-4648-ac4a-0ea8117d8ec5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.582325] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782534, 'name': CloneVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.585594] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 898.585594] env[68233]: value = "task-2782537" [ 898.585594] env[68233]: _type = "Task" [ 898.585594] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.599647] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782537, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.674158] env[68233]: DEBUG nova.network.neutron [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updated VIF entry in instance network info cache for port 5d587ebc-2b71-4893-96e5-f636d9a634a4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.674492] env[68233]: DEBUG nova.network.neutron [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [{"id": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "address": "fa:16:3e:0c:dc:5f", "network": {"id": "8e0ee843-3424-4b87-96e3-1490619c717c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-633279754-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ed5b204b46f445e4a12938af770ecbb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5ba07329-1d3e-4ba8-8774-d029262318c4", "external-id": "nsx-vlan-transportzone-534", "segmentation_id": 534, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d587ebc-2b", "ovs_interfaceid": "5d587ebc-2b71-4893-96e5-f636d9a634a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.732926] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ae4fc2c-a3bf-4e98-9892-bf865805bd6c tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.804s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.740182] env[68233]: INFO nova.compute.manager [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Took 44.94 seconds to build instance. 
[ 898.750101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.750101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.750101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.750101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.750101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.753034] env[68233]: INFO nova.compute.manager [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Terminating instance [ 898.825512] env[68233]: DEBUG nova.network.neutron [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Updated VIF entry in instance network info cache for port 6e108ab9-42e1-4bcb-99cf-f0628d6e4692. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 898.825512] env[68233]: DEBUG nova.network.neutron [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Updating instance_info_cache with network_info: [{"id": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "address": "fa:16:3e:07:71:7d", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e108ab9-42", "ovs_interfaceid": "6e108ab9-42e1-4bcb-99cf-f0628d6e4692", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.860412] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.860637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.022597] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d5c504-ae79-fb2d-21cb-01730732a981, 'name': SearchDatastore_Task, 'duration_secs': 0.0329} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.023089] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9477df55-380f-4f69-b248-41b5f2925ef2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.028717] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 899.028717] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52694828-445e-39dc-945b-e3f3fad4bc35" [ 899.028717] env[68233]: _type = "Task" [ 899.028717] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.037115] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52694828-445e-39dc-945b-e3f3fad4bc35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.066044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.246s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.072765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.003s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.074711] env[68233]: INFO nova.compute.claims [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 899.085603] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782534, 'name': CloneVM_Task, 'duration_secs': 1.578339} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.085824] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Created linked-clone VM from snapshot [ 899.086868] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b94a84-2558-488f-b8ab-b4de53cac6fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.094044] env[68233]: INFO nova.scheduler.client.report [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Deleted allocations for instance 2c219b8c-813d-4155-af3b-327a7ebd75fc [ 899.101748] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Uploading image 1d321d53-6cba-413d-adaa-5e6e9400cdca {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 899.107216] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782537, 'name': PowerOffVM_Task, 'duration_secs': 0.346013} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.107476] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.109277] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f93c06-f1b3-47f7-811c-34f070acba42 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.132079] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd16d3e-52e9-418e-aa97-59deffec9f80 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.144941] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 899.144941] env[68233]: value = "vm-559425" [ 899.144941] env[68233]: _type = "VirtualMachine" [ 899.144941] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 899.146026] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-23dbc454-0a94-47ed-b36c-da69667d6a85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.153029] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease: (returnval){ [ 899.153029] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a51e95-2e3d-5254-dd7e-502ced641d54" [ 899.153029] env[68233]: _type = "HttpNfcLease" [ 899.153029] env[68233]: } obtained for exporting VM: (result){ [ 899.153029] env[68233]: value = "vm-559425" [ 899.153029] env[68233]: _type = "VirtualMachine" [ 899.153029] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 899.153029] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the lease: (returnval){ [ 899.153029] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a51e95-2e3d-5254-dd7e-502ced641d54" [ 899.153029] env[68233]: _type = "HttpNfcLease" [ 899.153029] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 899.160060] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.160060] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a51e95-2e3d-5254-dd7e-502ced641d54" [ 899.160060] env[68233]: _type = "HttpNfcLease" [ 899.160060] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 899.173046] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.174114] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95f2d5e6-1877-40f8-8975-656ca5f92426 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.177923] env[68233]: DEBUG oslo_concurrency.lockutils [req-ffb0f6d1-8405-48c1-bb00-5773eb74279d req-a6d04582-46b4-4688-a130-687e020a727e service nova] Releasing lock "refresh_cache-3d759f4f-3845-4bb5-8cfa-639b7023bb27" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.179128] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 899.179128] env[68233]: value = "task-2782539" [ 899.179128] env[68233]: _type = "Task" [ 899.179128] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.186442] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782539, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.244972] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Successfully updated port: 80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 899.247363] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cd77af3-2b10-4c63-b1e0-f50d9ed1a8c1 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.797s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.257235] env[68233]: DEBUG nova.compute.manager [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 899.257453] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.258342] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cd9651-843f-47bf-9537-9ece7c1020a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.270198] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.270522] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca40e6d2-949e-4f27-8a0f-0bece858ab5e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.276887] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 899.276887] env[68233]: value = "task-2782540" [ 899.276887] env[68233]: _type = "Task" [ 899.276887] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.286116] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.326127] env[68233]: DEBUG oslo_concurrency.lockutils [req-144eba82-febe-4de8-bee4-f7ec64d21778 req-b295421a-a6f9-4d0f-ada5-695f71cdc098 service nova] Releasing lock "refresh_cache-0f813d55-2737-44ae-b62d-3321e77dfdab" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.363645] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.468334] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.469794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.469794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.469794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.469794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.472533] env[68233]: INFO nova.compute.manager [None 
req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Terminating instance [ 899.538851] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52694828-445e-39dc-945b-e3f3fad4bc35, 'name': SearchDatastore_Task, 'duration_secs': 0.039916} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.539435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.539701] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0f813d55-2737-44ae-b62d-3321e77dfdab/0f813d55-2737-44ae-b62d-3321e77dfdab.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.539956] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2db1579b-d221-41b5-95c5-73c6b1d67bff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.547609] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 899.547609] env[68233]: value = "task-2782541" [ 899.547609] env[68233]: _type = "Task" [ 899.547609] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.555013] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.609863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff1c1d3-0d34-4d78-8906-e9a5dcbf469f tempest-InstanceActionsV221TestJSON-1785172802 tempest-InstanceActionsV221TestJSON-1785172802-project-member] Lock "2c219b8c-813d-4155-af3b-327a7ebd75fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.408s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.662146] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 899.662146] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a51e95-2e3d-5254-dd7e-502ced641d54" [ 899.662146] env[68233]: _type = "HttpNfcLease" [ 899.662146] env[68233]: } is ready. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 899.662567] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 899.662567] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a51e95-2e3d-5254-dd7e-502ced641d54" [ 899.662567] env[68233]: _type = "HttpNfcLease" [ 899.662567] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 899.663470] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba8acc1-140b-49e4-bc1f-0cce06de5b44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.671521] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 899.671596] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 899.740915] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 899.741160] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 899.741411] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.741560] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.741734] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 899.742968] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db0f0969-ab39-4ccd-8e08-f5ad03aebcc8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.745900] env[68233]: DEBUG nova.compute.manager [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Received event network-vif-plugged-80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 899.745900] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Acquiring lock "903f0919-b321-4d74-9ea2-bc9771184ded-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.746469] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Lock "903f0919-b321-4d74-9ea2-bc9771184ded-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.746469] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Lock "903f0919-b321-4d74-9ea2-bc9771184ded-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.746469] env[68233]: DEBUG nova.compute.manager [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] No waiting events found dispatching network-vif-plugged-80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 899.746713] env[68233]: WARNING nova.compute.manager [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Received unexpected event network-vif-plugged-80978e6e-5b93-41d0-98a9-b2ca242940e5 for instance with vm_state building and task_state spawning. [ 899.746713] env[68233]: DEBUG nova.compute.manager [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Received event network-changed-80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 899.746841] env[68233]: DEBUG nova.compute.manager [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Refreshing instance network info cache due to event network-changed-80978e6e-5b93-41d0-98a9-b2ca242940e5. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 899.747060] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Acquiring lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.747226] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Acquired lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.747388] env[68233]: DEBUG nova.network.neutron [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Refreshing network info cache for port 80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.751515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.755950] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 899.756152] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 899.757261] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a63bacfb-9322-4a0f-af2b-16fa8f4cad94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.763426] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 899.763426] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52904ca4-3753-7dc9-8170-3650676bd18f" [ 899.763426] env[68233]: _type = "Task" [ 899.763426] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.773494] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52904ca4-3753-7dc9-8170-3650676bd18f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.787273] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782540, 'name': PowerOffVM_Task, 'duration_secs': 0.389971} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.787622] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.787622] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.788020] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-561a2a50-5829-4957-9139-32d6754545e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.840073] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ae0bbd98-f3cb-420e-8717-b7f87918ec24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.877975] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.877975] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.878181] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Deleting the datastore file [datastore1] 3d759f4f-3845-4bb5-8cfa-639b7023bb27 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.878363] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59c6da3c-1644-4979-948a-e157084e1afd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.890589] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for the task: (returnval){ [ 899.890589] env[68233]: value = "task-2782543" [ 899.890589] env[68233]: _type = "Task" [ 
899.890589] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.896140] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 899.902787] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782543, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.976921] env[68233]: DEBUG nova.compute.manager [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 899.977205] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.978293] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e43105e-49ea-4753-b4e5-9792203695da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.990217] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.990575] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61f82a16-1452-47b7-9271-c60641738e28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.999598] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 899.999598] env[68233]: value = "task-2782544" [ 899.999598] env[68233]: _type = "Task" [ 899.999598] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.008630] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782544, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.058301] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782541, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.279020] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52904ca4-3753-7dc9-8170-3650676bd18f, 'name': SearchDatastore_Task, 'duration_secs': 0.068315} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.279359] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89229217-b6a3-4bc7-95f1-6fbbfa101114 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.289292] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 900.289292] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f39b33-5a9a-bb67-cf1e-140c9f3401d8" [ 900.289292] env[68233]: _type = "Task" [ 900.289292] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.299487] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f39b33-5a9a-bb67-cf1e-140c9f3401d8, 'name': SearchDatastore_Task, 'duration_secs': 0.011212} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.299733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.303121] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. 
{{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 900.303121] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0da56d92-e5d1-4c87-8188-cf3d809c7965 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.307318] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 900.307318] env[68233]: value = "task-2782545" [ 900.307318] env[68233]: _type = "Task" [ 900.307318] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.320540] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782545, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.326058] env[68233]: DEBUG nova.network.neutron [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 900.382184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "32e05800-e812-412a-b049-89178737cffd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.382639] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.406305] env[68233]: DEBUG oslo_vmware.api [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Task: {'id': task-2782543, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369461} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.406813] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.407538] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.408489] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.408489] env[68233]: INFO nova.compute.manager [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Took 1.15 seconds to destroy the instance on the hypervisor. [ 900.408934] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 900.412718] env[68233]: DEBUG nova.compute.manager [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 900.412982] env[68233]: DEBUG nova.network.neutron [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 900.486829] env[68233]: DEBUG nova.network.neutron [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.511654] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782544, 'name': PowerOffVM_Task, 'duration_secs': 0.268158} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.515204] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.515606] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 900.516557] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e5e0702-5c09-4e41-83b0-ac386518b055 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.568448] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538895} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.569017] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0f813d55-2737-44ae-b62d-3321e77dfdab/0f813d55-2737-44ae-b62d-3321e77dfdab.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 900.569440] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 900.569638] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0c67549-22aa-434d-b511-86bd4ba4e99f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.576809] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 900.576809] env[68233]: value = "task-2782547" [ 900.576809] env[68233]: _type = "Task" [ 900.576809] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.591910] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782547, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.595553] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 900.595553] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 900.595553] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] c5c8bf0c-eb58-41bc-a316-b4ac78187658 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.595553] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40f3cf28-89ed-4ffb-a1c5-40072260aa54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.609028] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 900.609028] env[68233]: value = "task-2782548" [ 900.609028] env[68233]: _type = "Task" [ 900.609028] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.628031] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782548, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.680944] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a086f35c-3ced-4cb1-ac47-b2716b158685 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.692195] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c948648a-5bdd-4cf9-a62f-f4b1139b7eda {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.730348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdfb7c52-4255-4ef6-8108-c01a1910f08c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.744447] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60625e96-48be-4602-812d-e8fb1dc0fcb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.762760] env[68233]: DEBUG nova.compute.provider_tree [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.784931] env[68233]: DEBUG nova.compute.manager [req-ea123c4d-32dd-428f-b3f1-8bfd6e6974e5 req-49021644-d76f-439e-8a92-5e5af9fefdc7 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Received event network-vif-deleted-5d587ebc-2b71-4893-96e5-f636d9a634a4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 900.785310] env[68233]: INFO nova.compute.manager [req-ea123c4d-32dd-428f-b3f1-8bfd6e6974e5 req-49021644-d76f-439e-8a92-5e5af9fefdc7 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Neutron deleted interface 5d587ebc-2b71-4893-96e5-f636d9a634a4; detaching it from the instance and deleting it from the info cache [ 900.786048] env[68233]: DEBUG nova.network.neutron [req-ea123c4d-32dd-428f-b3f1-8bfd6e6974e5 req-49021644-d76f-439e-8a92-5e5af9fefdc7 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.821613] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782545, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.886130] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 900.990325] env[68233]: DEBUG oslo_concurrency.lockutils [req-b91c70d1-8cb2-4e56-9ce7-0e3617d59893 req-c3e3032a-a35a-4203-b317-d6f448ecd9b5 service nova] Releasing lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.990786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.990978] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.089302] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084732} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.089564] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 901.090374] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7d3ad6-892e-4d11-8975-cba6aced77b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.114591] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Reconfiguring VM instance instance-0000004b to attach disk [datastore2] 0f813d55-2737-44ae-b62d-3321e77dfdab/0f813d55-2737-44ae-b62d-3321e77dfdab.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.115027] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-101cdc26-d967-4a4f-bb64-0ee819359457 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.146427] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782548, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.148178] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 901.148178] env[68233]: value = "task-2782549" [ 901.148178] env[68233]: _type = "Task" [ 901.148178] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.157426] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782549, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.231466] env[68233]: DEBUG nova.network.neutron [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.265096] env[68233]: DEBUG nova.scheduler.client.report [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.289513] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-533b5016-2f15-4acd-b841-4708c0ad84f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.299269] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dca5674-57b6-4660-8117-fd3d2e42e95f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.323212] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782545, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63216} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.343126] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. 
[ 901.343955] env[68233]: DEBUG nova.compute.manager [req-ea123c4d-32dd-428f-b3f1-8bfd6e6974e5 req-49021644-d76f-439e-8a92-5e5af9fefdc7 service nova] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Detach interface failed, port_id=5d587ebc-2b71-4893-96e5-f636d9a634a4, reason: Instance 3d759f4f-3845-4bb5-8cfa-639b7023bb27 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 901.345136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03871241-6ead-4d74-9c95-c000de25ea6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.372266] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 901.372577] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee9c16fa-06e5-4b1c-8f85-28c130f502d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.391447] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 901.391447] env[68233]: value = "task-2782550" [ 901.391447] env[68233]: _type = "Task" [ 901.391447] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.402963] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782550, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.420713] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.533157] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 901.624913] env[68233]: DEBUG oslo_vmware.api [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782548, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.579551} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.625280] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 901.625556] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 901.625801] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 901.626009] env[68233]: INFO nova.compute.manager [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Took 1.65 seconds to destroy the instance on the hypervisor. [ 901.626453] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 901.626541] env[68233]: DEBUG nova.compute.manager [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 901.626636] env[68233]: DEBUG nova.network.neutron [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 901.658351] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782549, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.676183] env[68233]: DEBUG nova.network.neutron [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Updating instance_info_cache with network_info: [{"id": "80978e6e-5b93-41d0-98a9-b2ca242940e5", "address": "fa:16:3e:88:8b:de", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80978e6e-5b", "ovs_interfaceid": "80978e6e-5b93-41d0-98a9-b2ca242940e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.733025] env[68233]: INFO nova.compute.manager [-] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Took 1.32 seconds to deallocate network for instance. [ 901.770276] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.770888] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.774808] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.207s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.775125] env[68233]: DEBUG nova.objects.instance [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lazy-loading 'resources' on Instance uuid 03688e90-5433-47ca-baaa-75861ad093b7 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.904523] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782550, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.162559] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782549, 'name': ReconfigVM_Task, 'duration_secs': 0.934706} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.162559] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Reconfigured VM instance instance-0000004b to attach disk [datastore2] 0f813d55-2737-44ae-b62d-3321e77dfdab/0f813d55-2737-44ae-b62d-3321e77dfdab.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.163265] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-47ab43ad-2205-47f8-aaee-b3b5727e6ad8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.174938] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 902.174938] env[68233]: value = "task-2782551" [ 902.174938] env[68233]: _type = "Task" [ 902.174938] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.178905] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "refresh_cache-903f0919-b321-4d74-9ea2-bc9771184ded" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 902.179833] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance network_info: |[{"id": "80978e6e-5b93-41d0-98a9-b2ca242940e5", "address": "fa:16:3e:88:8b:de", "network": {"id": "e0eb34f6-caa9-4b3b-ac21-375345c8c093", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1803993119-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2ebf78d36f9e42eca135e60dc7dcc8c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "99639c37-b0c6-4be7-9594-230e44b1280b", "external-id": "nsx-vlan-transportzone-17", "segmentation_id": 17, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap80978e6e-5b", "ovs_interfaceid": "80978e6e-5b93-41d0-98a9-b2ca242940e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 902.184397] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:8b:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '99639c37-b0c6-4be7-9594-230e44b1280b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '80978e6e-5b93-41d0-98a9-b2ca242940e5', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 902.193068] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.193315] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782551, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.193715] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 902.194524] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-125717db-8ef3-4fc0-a27a-aa99bb75617a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.216509] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 902.216509] env[68233]: value = "task-2782552" [ 902.216509] env[68233]: _type = "Task" [ 902.216509] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.228053] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782552, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.241415] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.279574] env[68233]: DEBUG nova.compute.utils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 902.285932] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 902.286199] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 902.349635] env[68233]: DEBUG nova.policy [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '839b33e7aa11482882403ddc2319583f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '853a057cfba3400ba05c89cb1d292f61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.402366] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782550, 'name': ReconfigVM_Task, 'duration_secs': 0.815729} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.402581] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 902.405191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853a395f-2eab-49eb-9c99-b9319a45c2ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.434992] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c37a472-354f-4a39-bdc4-700ca6b59b1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.446854] env[68233]: DEBUG nova.compute.manager [req-b4f9f430-60d9-4fe8-8f88-9700ef84f5db req-9d45a1bd-a4a8-4484-afd2-3341f441003b service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Received event network-vif-deleted-8dc8585f-e1a1-4d6c-8ce7-40de53add184 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 902.447080] env[68233]: INFO nova.compute.manager [req-b4f9f430-60d9-4fe8-8f88-9700ef84f5db req-9d45a1bd-a4a8-4484-afd2-3341f441003b service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Neutron deleted interface 8dc8585f-e1a1-4d6c-8ce7-40de53add184; detaching it from the instance and deleting it from the info cache [ 902.447260] env[68233]: DEBUG nova.network.neutron [req-b4f9f430-60d9-4fe8-8f88-9700ef84f5db req-9d45a1bd-a4a8-4484-afd2-3341f441003b service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.454353] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 902.454353] env[68233]: value = "task-2782553" [ 902.454353] env[68233]: _type = "Task" [ 902.454353] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.471015] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.684231] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782551, 'name': Rename_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.685225] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Successfully created port: 2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 902.726476] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782552, 'name': CreateVM_Task, 'duration_secs': 0.379136} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.726737] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.727409] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.727961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.728137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 902.731498] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24efd298-78a7-4c8a-ba12-e9b6417a6f23 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.736968] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 902.736968] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c71ad5-ab92-cbed-b111-5b84591fe549" [ 902.736968] env[68233]: _type = "Task" [ 902.736968] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.745150] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c71ad5-ab92-cbed-b111-5b84591fe549, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.785981] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.853814] env[68233]: DEBUG nova.network.neutron [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.893772] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d38507-3d6a-4be6-a554-efaef8dcc31a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.902576] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc838a36-c733-4e13-a810-305078b09a8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.942510] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a98c8fb-5f67-4d97-bba9-9fb117144b26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.949858] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87ea53f2-a46d-4597-80b4-210565235134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.956038] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a27b945-7cff-469c-ae6c-7afb408d58a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.966154] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970b805f-a5f0-4600-a6b7-5bd3bc5b77df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.988544] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.989141] env[68233]: DEBUG nova.compute.provider_tree [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.023036] env[68233]: DEBUG nova.compute.manager [req-b4f9f430-60d9-4fe8-8f88-9700ef84f5db req-9d45a1bd-a4a8-4484-afd2-3341f441003b service nova] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Detach interface failed, port_id=8dc8585f-e1a1-4d6c-8ce7-40de53add184, reason: Instance c5c8bf0c-eb58-41bc-a316-b4ac78187658 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 903.185821] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782551, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.248165] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c71ad5-ab92-cbed-b111-5b84591fe549, 'name': SearchDatastore_Task, 'duration_secs': 0.015741} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.248482] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.248722] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 903.248968] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.249150] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 903.249373] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.249652] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d931f904-664e-48e2-8802-3d9ee3308dad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.258055] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.258240] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 903.258954] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a20df494-3f24-4e4e-9171-54de80d625bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.264126] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 903.264126] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5257709c-f7ad-fb66-8812-de8c9d714270" [ 903.264126] env[68233]: _type = "Task" [ 903.264126] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.271401] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5257709c-f7ad-fb66-8812-de8c9d714270, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.357756] env[68233]: INFO nova.compute.manager [-] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Took 1.73 seconds to deallocate network for instance. [ 903.468539] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782553, 'name': ReconfigVM_Task, 'duration_secs': 0.938763} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.468539] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.468783] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6ecb9a4-50df-4f9f-9e94-be5cf1e2708e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.475588] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 903.475588] env[68233]: value = "task-2782554" [ 903.475588] env[68233]: _type = "Task" [ 903.475588] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.485918] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782554, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.497028] env[68233]: DEBUG nova.scheduler.client.report [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.686496] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782551, 'name': Rename_Task, 'duration_secs': 1.180267} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.686802] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 903.687087] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-495de8cd-d76e-4731-84fa-3876fce3a35b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.693337] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 903.693337] env[68233]: value = "task-2782555" [ 903.693337] env[68233]: _type = "Task" [ 903.693337] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.702011] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782555, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.775272] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5257709c-f7ad-fb66-8812-de8c9d714270, 'name': SearchDatastore_Task, 'duration_secs': 0.012958} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.776080] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cccc2513-dbca-49bd-86e2-6436dfebbdde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.781277] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 903.781277] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5201f351-e2ce-1e57-96da-ee2cffc8e4b2" [ 903.781277] env[68233]: _type = "Task" [ 903.781277] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.788975] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5201f351-e2ce-1e57-96da-ee2cffc8e4b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.795306] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.822221] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9f8b2f4b15f101bfbc560b365697fbd3',container_format='bare',created_at=2025-03-06T03:54:13Z,direct_url=<?>,disk_format='vmdk',id=fa91d76b-2020-4abf-8837-92f1504eee4f,min_disk=1,min_ram=0,name='tempest-test-snap-533457875',owner='853a057cfba3400ba05c89cb1d292f61',properties=ImageMetaProps,protected=<?>,size=21334016,status='active',tags=<?>,updated_at=2025-03-06T03:54:28Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 903.822640] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 903.822855] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 903.823137] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor pref 0:0:0 
{{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 903.823278] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 903.823434] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 903.823641] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 903.823802] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 903.823972] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 903.824152] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 903.824332] env[68233]: DEBUG nova.virt.hardware [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 903.825201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81bd996b-2c73-42a5-946a-723709ea7eb9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.833254] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e60279-43ab-4166-ab3b-801ff6414a28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.865172] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.987679] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 
tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782554, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.001801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.004776] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.339s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.005298] env[68233]: DEBUG nova.objects.instance [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lazy-loading 'resources' on Instance uuid bb59f959-4cf8-4244-b7b4-6bf630a616b3 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 904.028690] env[68233]: INFO nova.scheduler.client.report [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted allocations for instance 03688e90-5433-47ca-baaa-75861ad093b7 [ 904.153596] env[68233]: DEBUG nova.compute.manager [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Received event network-vif-plugged-2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 904.153872] env[68233]: DEBUG oslo_concurrency.lockutils [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] Acquiring lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.154088] env[68233]: DEBUG oslo_concurrency.lockutils [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.154296] env[68233]: DEBUG oslo_concurrency.lockutils [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.154469] env[68233]: DEBUG nova.compute.manager [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] No waiting events found dispatching 
network-vif-plugged-2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.154641] env[68233]: WARNING nova.compute.manager [req-574772ab-3d7c-4412-9a26-10a437afaef4 req-663d05e0-2799-4114-be8e-b6380eb3ecab service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Received unexpected event network-vif-plugged-2adac63b-c8ff-490f-bcf1-316fb58c480a for instance with vm_state building and task_state spawning. [ 904.204419] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782555, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.238259] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Successfully updated port: 2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 904.291778] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5201f351-e2ce-1e57-96da-ee2cffc8e4b2, 'name': SearchDatastore_Task, 'duration_secs': 0.012514} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.292059] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.292346] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 903f0919-b321-4d74-9ea2-bc9771184ded/903f0919-b321-4d74-9ea2-bc9771184ded.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.292609] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a9968df-804a-418f-b728-136881989ebf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.298166] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 904.298166] env[68233]: value = "task-2782556" [ 904.298166] env[68233]: _type = "Task" [ 904.298166] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.305966] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782556, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.487122] env[68233]: DEBUG oslo_vmware.api [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782554, 'name': PowerOnVM_Task, 'duration_secs': 0.512593} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.487392] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.489972] env[68233]: DEBUG nova.compute.manager [None req-c153cdd9-2a07-4963-a66d-c7b33b3f484e tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.490736] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d78c0fe-49f7-4608-9bb4-9405cb8fa996 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.539922] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60a7849e-cff9-47de-8f5b-72f5267ac769 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "03688e90-5433-47ca-baaa-75861ad093b7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.315s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.709163] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782555, 'name': PowerOnVM_Task, 'duration_secs': 0.601386} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.709448] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 904.709592] env[68233]: INFO nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Took 9.64 seconds to spawn the instance on the hypervisor. 
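The records around this point all follow oslo_vmware's poll-until-complete pattern: a vCenter task is started (PowerOnVM_Task, CopyVirtualDisk_Task, ...), the caller logs "Waiting for the task ... to complete", and _poll_task then reports progress (0% ... 89% ... 100%) until the task is logged as "completed successfully". The snippet below is a minimal standalone sketch of that loop for illustration only; it is not the actual oslo_vmware.api.wait_for_task implementation, and poll_progress is a hypothetical callable standing in for the vCenter task-info query.

import time

class TaskTimeout(Exception):
    """Raised when the polled task never reports completion."""

def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    # poll_progress is a hypothetical callable that returns the task's
    # progress as an int percentage (what the log prints as
    # "PowerOnVM_Task progress is 89%") and returns 100 only once the
    # task has actually finished.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        progress = poll_progress()
        if progress >= 100:
            return                      # "completed successfully"
        time.sleep(interval)            # poll again after a fixed interval
    raise TaskTimeout("task did not complete within %.0fs" % timeout)

In the log itself this polling is performed inside oslo_vmware.api (the wait_for_task / _poll_task call sites at api.py:397, 434 and 444 shown in the trailing braces of each record).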
[ 904.709768] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 904.710772] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f69cd59-4990-47ad-a311-32a21bbdc293 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.742439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.742439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.742439] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.810236] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782556, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.942773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2c0125-6295-45bf-b210-7a1b477adcc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.952089] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47f94e3-21c8-41fa-a6b0-28a1cfe37adc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.983074] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c897b21-bfe4-4b54-ac40-a09088a1778b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.990675] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d011e9b0-bd66-4f27-9a21-5636e149b27e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.007068] env[68233]: DEBUG nova.compute.provider_tree [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.197991] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "abdf9de2-8563-4a31-91a3-0c18b0387533" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.198436] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.198827] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.199102] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.199405] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 
tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.201977] env[68233]: INFO nova.compute.manager [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Terminating instance [ 905.235302] env[68233]: INFO nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Took 34.04 seconds to build instance. [ 905.282590] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 905.314706] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782556, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.007524} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.314981] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 903f0919-b321-4d74-9ea2-bc9771184ded/903f0919-b321-4d74-9ea2-bc9771184ded.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.315386] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.315474] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9485570a-497c-45be-a145-1bc0b0d7635f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.322877] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 905.322877] env[68233]: value = "task-2782557" [ 905.322877] env[68233]: _type = "Task" [ 905.322877] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.333200] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782557, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.510668] env[68233]: DEBUG nova.scheduler.client.report [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.650133] env[68233]: DEBUG nova.network.neutron [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Updating instance_info_cache with network_info: [{"id": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "address": "fa:16:3e:33:20:97", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adac63b-c8", "ovs_interfaceid": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.706794] env[68233]: DEBUG nova.compute.manager [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 905.706794] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 905.708037] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcf9192-8d78-4335-9bf2-abbe7cacdcfe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.717030] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.717305] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc2567f4-2e54-49ee-bec3-4ce0ca1a3822 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.724207] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 905.724207] env[68233]: value = "task-2782558" [ 905.724207] env[68233]: _type = "Task" [ 905.724207] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.732764] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782558, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.736848] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.557s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.810429] env[68233]: INFO nova.compute.manager [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Unrescuing [ 905.810765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.810949] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquired lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.811152] env[68233]: DEBUG nova.network.neutron [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 905.834418] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782557, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07425} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.834739] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.835637] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67627cea-8924-4cd9-94fd-9fbc80b5020b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.859068] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 903f0919-b321-4d74-9ea2-bc9771184ded/903f0919-b321-4d74-9ea2-bc9771184ded.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.859907] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a0d6a94-8736-4ef7-a064-743f6782cf5a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.881175] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 905.881175] env[68233]: value = "task-2782559" [ 905.881175] env[68233]: _type = "Task" [ 905.881175] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.890756] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782559, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.017328] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.012s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.019994] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.181s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.021521] env[68233]: INFO nova.compute.claims [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.043172] env[68233]: INFO nova.scheduler.client.report [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Deleted allocations for instance bb59f959-4cf8-4244-b7b4-6bf630a616b3 [ 906.151839] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.152333] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Instance network_info: |[{"id": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "address": "fa:16:3e:33:20:97", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adac63b-c8", "ovs_interfaceid": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 906.152893] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 
tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:20:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2adac63b-c8ff-490f-bcf1-316fb58c480a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 906.161791] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.162467] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 906.162805] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-968ce21d-20af-4e6c-8dac-5c2788bdcbd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.185691] env[68233]: DEBUG nova.compute.manager [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Received event network-changed-2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 906.186019] env[68233]: DEBUG nova.compute.manager [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Refreshing instance network info cache due to event network-changed-2adac63b-c8ff-490f-bcf1-316fb58c480a. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 906.186145] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] Acquiring lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.186298] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] Acquired lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.186501] env[68233]: DEBUG nova.network.neutron [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Refreshing network info cache for port 2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.190033] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 906.190033] env[68233]: value = "task-2782560" [ 906.190033] env[68233]: _type = "Task" [ 906.190033] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.200026] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782560, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.235343] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782558, 'name': PowerOffVM_Task, 'duration_secs': 0.291724} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.235629] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 906.235803] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 906.236086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41bb68d2-be80-42c1-9d1f-f092e47df6bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.327950] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 906.327950] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 906.327950] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore1] abdf9de2-8563-4a31-91a3-0c18b0387533 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 906.327950] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5358332-9894-4c9f-88ef-44a2d40acfe9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.337919] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 906.337919] env[68233]: value = "task-2782562" [ 906.337919] env[68233]: _type = "Task" [ 906.337919] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.346788] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782562, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.392252] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782559, 'name': ReconfigVM_Task, 'duration_secs': 0.380458} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.394984] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 903f0919-b321-4d74-9ea2-bc9771184ded/903f0919-b321-4d74-9ea2-bc9771184ded.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.395699] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd7e995c-f812-4339-9431-c62208778ff5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.403110] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 906.403110] env[68233]: value = "task-2782563" [ 906.403110] env[68233]: _type = "Task" [ 906.403110] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.412267] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782563, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.525537] env[68233]: DEBUG nova.network.neutron [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updating instance_info_cache with network_info: [{"id": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "address": "fa:16:3e:39:39:47", "network": {"id": "1a40774d-db95-4fb4-a68d-6aa3b6595fcc", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-2045592130-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "f693e1f45b0d4fc0b871ae4dd2df6c4e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ee1c76d-1a61-4546-85cb-d4bd3c1b35ef", "external-id": "nsx-vlan-transportzone-161", "segmentation_id": 161, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3dc9bf3-0d", "ovs_interfaceid": "f3dc9bf3-0d50-44df-89a6-52cfc6899cad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.553577] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a691dfb-a9e7-4dc9-921d-b14d34aa81f4 tempest-ServerMetadataNegativeTestJSON-1577810662 tempest-ServerMetadataNegativeTestJSON-1577810662-project-member] Lock "bb59f959-4cf8-4244-b7b4-6bf630a616b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.755s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.703635] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782560, 'name': CreateVM_Task, 'duration_secs': 0.398046} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.703783] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.704483] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.704668] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.705031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.705307] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d142d87-deea-455b-a926-58fa7deaaa9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.713646] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 906.713646] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241c887-5878-8d3a-e316-18d0bd6598af" [ 906.713646] env[68233]: _type = "Task" [ 906.713646] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.728223] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5241c887-5878-8d3a-e316-18d0bd6598af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.849140] env[68233]: DEBUG oslo_vmware.api [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201989} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.849140] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 906.849140] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 906.849346] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.849526] env[68233]: INFO nova.compute.manager [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Took 1.14 seconds to destroy the instance on the hypervisor. [ 906.849830] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.850035] env[68233]: DEBUG nova.compute.manager [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 906.850185] env[68233]: DEBUG nova.network.neutron [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 906.915093] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782563, 'name': Rename_Task, 'duration_secs': 0.202035} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.915583] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.916744] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6c5cacc-e8c0-4f92-b177-347e9e035e59 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.920896] env[68233]: DEBUG nova.network.neutron [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Updated VIF entry in instance network info cache for port 2adac63b-c8ff-490f-bcf1-316fb58c480a. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 906.921363] env[68233]: DEBUG nova.network.neutron [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Updating instance_info_cache with network_info: [{"id": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "address": "fa:16:3e:33:20:97", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2adac63b-c8", "ovs_interfaceid": "2adac63b-c8ff-490f-bcf1-316fb58c480a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.924135] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 906.924135] env[68233]: value = "task-2782564" [ 906.924135] env[68233]: _type = "Task" [ 906.924135] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.933059] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782564, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.031158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Releasing lock "refresh_cache-3cca16e1-3363-4026-9359-4ed2ba41e25d" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.032098] env[68233]: DEBUG nova.objects.instance [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lazy-loading 'flavor' on Instance uuid 3cca16e1-3363-4026-9359-4ed2ba41e25d {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 907.226532] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.226532] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Processing image fa91d76b-2020-4abf-8837-92f1504eee4f {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.226778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.227013] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 907.227110] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.232069] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ee20d59-7c36-4c64-85b8-f76c1af92e22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.242681] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.243615] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 
tempest-ImagesTestJSON-1014706764-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.243821] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84c4b064-8315-476f-a71a-840128e7ed93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.251906] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 907.251906] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527e0435-2c09-df3b-57a9-18dd65bc8e07" [ 907.251906] env[68233]: _type = "Task" [ 907.251906] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.260683] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527e0435-2c09-df3b-57a9-18dd65bc8e07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.343579] env[68233]: DEBUG nova.compute.manager [req-b8db041b-57f8-434d-9d56-416c2215a94f req-89d24424-39e0-4c49-bf94-ad9bad099fda service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Received event network-vif-deleted-f6902fc8-b8ad-4c4c-8056-c9fbf48669b9 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 907.343579] env[68233]: INFO nova.compute.manager [req-b8db041b-57f8-434d-9d56-416c2215a94f req-89d24424-39e0-4c49-bf94-ad9bad099fda service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Neutron deleted interface f6902fc8-b8ad-4c4c-8056-c9fbf48669b9; detaching it from the instance and deleting it from the info cache [ 907.343749] env[68233]: DEBUG nova.network.neutron [req-b8db041b-57f8-434d-9d56-416c2215a94f req-89d24424-39e0-4c49-bf94-ad9bad099fda service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.426391] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d4896f1-c3e1-491a-9d6b-1372354c7784 req-b6deedbf-e032-49cd-869e-b606cd28a940 service nova] Releasing lock "refresh_cache-1207585c-fb2a-43b7-aec2-c3a7889255a5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.437427] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782564, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.541810] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2209de42-02d8-4d86-a736-e42af7e9741b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.567746] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 907.570818] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1975f638-41b2-4d8f-8eae-6a082dcf0ac6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.578819] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 907.578819] env[68233]: value = "task-2782565" [ 907.578819] env[68233]: _type = "Task" [ 907.578819] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.585101] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74471f7d-80a9-4290-bb6b-3bd50c07ed23 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.591486] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.597265] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9215d557-3aa0-4d9f-8793-86a4d2b7a40b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.629531] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e32066-190a-42aa-9760-16b25ef42d1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.637862] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93967c6e-15cb-46af-a631-399dd29618e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.656446] env[68233]: DEBUG nova.compute.provider_tree [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.764589] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 907.765129] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Fetch image to [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e/OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 907.765129] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Downloading stream optimized image fa91d76b-2020-4abf-8837-92f1504eee4f to [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e/OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e.vmdk on the data store datastore1 as vApp {{(pid=68233) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 907.765219] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Downloading image file data fa91d76b-2020-4abf-8837-92f1504eee4f to the ESX as VM named 'OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e' {{(pid=68233) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 907.823144] env[68233]: DEBUG nova.network.neutron [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.848620] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09572406-4a2b-4d59-b647-26388df8ab42 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.858757] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47d8ad5f-179b-4f6c-82ca-d76e74a1836b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.872577] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 907.872577] env[68233]: value = "resgroup-9" [ 907.872577] env[68233]: _type = "ResourcePool" [ 907.872577] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 907.874720] env[68233]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4396c2ae-8c49-4fbe-8cf7-30a969cf6f4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.898064] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease: (returnval){ [ 907.898064] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 907.898064] env[68233]: _type = "HttpNfcLease" [ 907.898064] env[68233]: } obtained for vApp import into resource pool (val){ [ 907.898064] env[68233]: value = "resgroup-9" [ 907.898064] env[68233]: _type = "ResourcePool" [ 907.898064] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 907.898384] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the lease: (returnval){ [ 907.898384] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 907.898384] env[68233]: _type = "HttpNfcLease" [ 907.898384] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 907.910518] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 907.910518] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 907.910518] env[68233]: _type = "HttpNfcLease" [ 907.910518] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 907.932428] env[68233]: DEBUG nova.compute.manager [req-b8db041b-57f8-434d-9d56-416c2215a94f req-89d24424-39e0-4c49-bf94-ad9bad099fda service nova] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Detach interface failed, port_id=f6902fc8-b8ad-4c4c-8056-c9fbf48669b9, reason: Instance abdf9de2-8563-4a31-91a3-0c18b0387533 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 907.941763] env[68233]: DEBUG oslo_vmware.api [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782564, 'name': PowerOnVM_Task, 'duration_secs': 0.5663} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.941763] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.941915] env[68233]: INFO nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Took 10.11 seconds to spawn the instance on the hypervisor. [ 907.942168] env[68233]: DEBUG nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.943027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded4d1dc-d076-44bd-bad1-21284f7a3bf8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.090256] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782565, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.161036] env[68233]: DEBUG nova.scheduler.client.report [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 908.324352] env[68233]: INFO nova.compute.manager [-] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Took 1.47 seconds to deallocate network for instance. [ 908.407022] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.407022] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 908.407022] env[68233]: _type = "HttpNfcLease" [ 908.407022] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 908.462673] env[68233]: INFO nova.compute.manager [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Took 34.40 seconds to build instance. 
[ 908.590575] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782565, 'name': PowerOffVM_Task, 'duration_secs': 0.630151} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.590864] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.596228] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 908.596548] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4847fc1-2dac-489a-8ba6-dfdd8de80d39 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.614851] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 908.614851] env[68233]: value = "task-2782567" [ 908.614851] env[68233]: _type = "Task" [ 908.614851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.624647] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782567, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.666976] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.647s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.667601] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 908.670464] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.490s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.670710] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.672977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.900s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.674623] env[68233]: INFO nova.compute.claims [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.704573] env[68233]: INFO nova.scheduler.client.report [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleted allocations for instance 72467d49-6fa8-42db-871e-4e50e77eedf7 [ 908.832074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.907064] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 908.907064] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 908.907064] env[68233]: _type = "HttpNfcLease" [ 908.907064] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 908.907389] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 908.907389] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b3fe44-a510-003d-3131-3bf4d6ac2f94" [ 908.907389] env[68233]: _type = "HttpNfcLease" [ 908.907389] env[68233]: }. 
{{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 908.908231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da3f3cb-c6d4-4dac-b353-e66f5beb5c70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.916313] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 908.916467] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 908.972466] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8f9509f7-49fc-4942-8361-24bd40b87dc7 tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.751s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.981106] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bc34e3ac-b12e-4091-86d4-6eb5d6faaab2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.087997] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 909.088927] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d54a8bd-bf1e-4bb8-9c51-c46711b3fe62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.094855] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 909.095045] env[68233]: ERROR oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk due to incomplete transfer. 
[ 909.095277] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d6830332-c623-4713-a3cf-98a4fe4b98d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.111409] env[68233]: DEBUG oslo_vmware.rw_handles [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527050a8-c145-73f6-65eb-410aced2cb67/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 909.111875] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Uploaded image 1d321d53-6cba-413d-adaa-5e6e9400cdca to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 909.114526] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 909.114835] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f648b997-576a-4c45-a357-11fad0d4f3ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.126654] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782567, 'name': ReconfigVM_Task, 'duration_secs': 0.32489} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.127986] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 909.128211] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.129373] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 909.129373] env[68233]: value = "task-2782568" [ 909.129373] env[68233]: _type = "Task" [ 909.129373] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.129660] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8873bd51-f158-4fcf-9f89-d939a5a75b41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.143356] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 909.143356] env[68233]: value = "task-2782569" [ 909.143356] env[68233]: _type = "Task" [ 909.143356] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.153385] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.180303] env[68233]: DEBUG nova.compute.utils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 909.186107] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 909.186377] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 909.213053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22c7d957-6d47-422d-84ee-9a520617e9a1 tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "72467d49-6fa8-42db-871e-4e50e77eedf7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.284s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.269351] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "0f813d55-2737-44ae-b62d-3321e77dfdab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.269661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.003s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
909.269873] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.270066] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.270236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.272678] env[68233]: INFO nova.compute.manager [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Terminating instance [ 909.279776] env[68233]: DEBUG nova.policy [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 909.478038] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "903f0919-b321-4d74-9ea2-bc9771184ded" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.480542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.480542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock 
"903f0919-b321-4d74-9ea2-bc9771184ded-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.480542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.480542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 909.483192] env[68233]: INFO nova.compute.manager [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Terminating instance [ 909.642768] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782568, 'name': Destroy_Task, 'duration_secs': 0.400126} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.643322] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Destroyed the VM [ 909.643322] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 909.643476] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-81eabaac-16bd-41c9-ba15-a099bcddbb97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.655441] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 909.655441] env[68233]: value = "task-2782570" [ 909.655441] env[68233]: _type = "Task" [ 909.655441] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.656473] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782569, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.666405] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782570, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.683841] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 909.770381] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Successfully created port: 008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.776495] env[68233]: DEBUG nova.compute.manager [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.776660] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.777534] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ae8662-cbad-45dd-a284-c7034e134b6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.785383] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.787801] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52e1cd95-95a1-4498-ae0c-3058516a4e1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.794321] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 909.794321] env[68233]: value = "task-2782571" [ 909.794321] env[68233]: _type = "Task" [ 909.794321] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.801899] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.992606] env[68233]: DEBUG nova.compute.manager [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.992911] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.994076] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c1e9d8-7bb4-4111-b12b-179688bbda1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.002455] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.002722] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-807a0b1d-8817-405c-b0d2-51a5c160c8fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.012552] env[68233]: DEBUG oslo_vmware.api [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 910.012552] env[68233]: value = "task-2782572" [ 910.012552] env[68233]: _type = "Task" [ 910.012552] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.025958] env[68233]: DEBUG oslo_vmware.api [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782572, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.125416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.126027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.126200] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.126437] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.126528] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.128967] env[68233]: INFO nova.compute.manager [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Terminating instance [ 910.166485] env[68233]: DEBUG oslo_vmware.api [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782569, 'name': PowerOnVM_Task, 'duration_secs': 0.878246} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.167493] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.168657] env[68233]: DEBUG nova.compute.manager [None req-2c2ef7b6-e392-44bf-b5bf-f842180621a2 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.172921] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a116994a-272a-4dba-b8bf-3b249400fc2c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.175653] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782570, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.288650] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ab00b2-6f24-4319-bc8c-79be4327e7af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.299705] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95501e4-73d0-44dd-99cc-ecc0690c0121 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.308894] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782571, 'name': PowerOffVM_Task, 'duration_secs': 0.371343} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.334732] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.334945] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.335415] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-719585d4-ff79-46f0-b1e6-91906f166ba3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.337562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6e3c42-cba6-45ef-9401-5668937764ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.345753] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7944174-7eb7-4654-b928-20a5a36c135e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.359114] env[68233]: DEBUG nova.compute.provider_tree [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.419266] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.419542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.419731] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleting the datastore file [datastore2] 0f813d55-2737-44ae-b62d-3321e77dfdab {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.419999] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26ef6265-4df6-4bb7-81e2-3a243445be60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.426112] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 
tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 910.426112] env[68233]: value = "task-2782574" [ 910.426112] env[68233]: _type = "Task" [ 910.426112] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.435906] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.523701] env[68233]: DEBUG oslo_vmware.api [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782572, 'name': PowerOffVM_Task, 'duration_secs': 0.219017} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.523975] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 910.524176] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.524425] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33302718-b099-49cf-8441-dd1d3c983d21 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.600040] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.600040] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.600040] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleting the datastore file [datastore2] 903f0919-b321-4d74-9ea2-bc9771184ded {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.600263] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d093a08-c8bf-4e34-913e-a461d4dbffe6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.606434] env[68233]: DEBUG oslo_vmware.api [None 
req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for the task: (returnval){ [ 910.606434] env[68233]: value = "task-2782576" [ 910.606434] env[68233]: _type = "Task" [ 910.606434] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.614697] env[68233]: DEBUG oslo_vmware.api [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782576, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.633072] env[68233]: DEBUG nova.compute.manager [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 910.633072] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.633846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d670c5-6284-4abe-829b-8815e7d86b06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.640494] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 910.640712] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fb43412-dc40-4980-95ed-108e0fe0cf32 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.646175] env[68233]: DEBUG oslo_vmware.api [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 910.646175] env[68233]: value = "task-2782577" [ 910.646175] env[68233]: _type = "Task" [ 910.646175] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.653866] env[68233]: DEBUG oslo_vmware.api [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.667142] env[68233]: DEBUG oslo_vmware.api [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782570, 'name': RemoveSnapshot_Task, 'duration_secs': 0.561415} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.667541] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 910.667890] env[68233]: INFO nova.compute.manager [None req-cf40cb75-015a-4813-8521-90952268c5ab tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 16.73 seconds to snapshot the instance on the hypervisor. [ 910.698218] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 910.728549] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.728847] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.729065] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.729306] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.729500] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.729666] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 
tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.729881] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.730122] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.730343] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.730583] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.730806] env[68233]: DEBUG nova.virt.hardware [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.731792] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3172ff59-4806-4770-8e93-2817edfe635c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.740950] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3084cfad-5ba8-4de6-89aa-7ce37a5eb36f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.863158] env[68233]: DEBUG nova.scheduler.client.report [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.936597] env[68233]: DEBUG oslo_vmware.api [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124582} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.936855] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.937055] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.937243] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.937419] env[68233]: INFO nova.compute.manager [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Took 1.16 seconds to destroy the instance on the hypervisor. [ 910.937664] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.937858] env[68233]: DEBUG nova.compute.manager [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.938400] env[68233]: DEBUG nova.network.neutron [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.116875] env[68233]: DEBUG oslo_vmware.api [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Task: {'id': task-2782576, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153327} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.117265] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.117318] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.117759] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.117759] env[68233]: INFO nova.compute.manager [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Took 1.12 seconds to destroy the instance on the hypervisor. [ 911.117960] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.118282] env[68233]: DEBUG nova.compute.manager [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.118390] env[68233]: DEBUG nova.network.neutron [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.156934] env[68233]: DEBUG oslo_vmware.api [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782577, 'name': PowerOffVM_Task, 'duration_secs': 0.21271} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.156934] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.156934] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 911.157134] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39648ee9-84dc-463a-bdc8-f02dd829cb2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.215085] env[68233]: DEBUG nova.compute.manager [req-23ca2b0a-862c-4af2-bffd-f105122cb1fa req-7f5b106f-9d6c-485c-a22b-e367d72b93a8 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Received event network-vif-deleted-6e108ab9-42e1-4bcb-99cf-f0628d6e4692 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 911.215383] env[68233]: INFO nova.compute.manager [req-23ca2b0a-862c-4af2-bffd-f105122cb1fa req-7f5b106f-9d6c-485c-a22b-e367d72b93a8 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Neutron deleted interface 6e108ab9-42e1-4bcb-99cf-f0628d6e4692; detaching it from the instance and deleting it from the info cache [ 911.215469] env[68233]: DEBUG nova.network.neutron [req-23ca2b0a-862c-4af2-bffd-f105122cb1fa req-7f5b106f-9d6c-485c-a22b-e367d72b93a8 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.244768] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 911.245191] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 911.245522] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleting the datastore file [datastore2] 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 911.245881] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a7a49bf-0dab-4780-91d8-db2e811cd104 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.256472] env[68233]: DEBUG oslo_vmware.api [None 
req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for the task: (returnval){ [ 911.256472] env[68233]: value = "task-2782579" [ 911.256472] env[68233]: _type = "Task" [ 911.256472] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.264127] env[68233]: DEBUG oslo_vmware.api [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782579, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.367935] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.368740] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 911.371906] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.154s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.372182] env[68233]: DEBUG nova.objects.instance [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lazy-loading 'resources' on Instance uuid 22c06baf-6316-4531-8037-b8b77c401596 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.538792] env[68233]: DEBUG nova.compute.manager [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Received event network-vif-plugged-008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 911.539051] env[68233]: DEBUG oslo_concurrency.lockutils [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] Acquiring lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.539287] env[68233]: DEBUG oslo_concurrency.lockutils [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.539477] env[68233]: DEBUG oslo_concurrency.lockutils [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.539652] env[68233]: DEBUG nova.compute.manager [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] No waiting events found dispatching network-vif-plugged-008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.539820] env[68233]: WARNING nova.compute.manager [req-41aa2b0c-f8de-446d-a3e5-fe458b497f58 req-9124eda8-a770-41a3-aaa9-eed759e7e312 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Received unexpected event network-vif-plugged-008c3f5c-f83a-4833-99e9-7aa70aff0c0b for instance with vm_state building and task_state spawning. [ 911.688773] env[68233]: DEBUG nova.network.neutron [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.718362] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc733f8f-c6e5-44de-bfb8-6aa33ff03518 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.731129] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e16950d-8bc3-485b-aead-a0a5e6110177 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.775635] env[68233]: DEBUG nova.compute.manager [req-23ca2b0a-862c-4af2-bffd-f105122cb1fa req-7f5b106f-9d6c-485c-a22b-e367d72b93a8 service nova] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Detach interface failed, port_id=6e108ab9-42e1-4bcb-99cf-f0628d6e4692, reason: Instance 0f813d55-2737-44ae-b62d-3321e77dfdab could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 911.781394] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Successfully updated port: 008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.788099] env[68233]: DEBUG oslo_vmware.api [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Task: {'id': task-2782579, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140836} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.788311] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.788610] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.788830] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.789019] env[68233]: INFO nova.compute.manager [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 911.789264] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.791161] env[68233]: DEBUG nova.compute.manager [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 911.791399] env[68233]: DEBUG nova.network.neutron [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 911.875142] env[68233]: DEBUG nova.compute.utils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.880505] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.880505] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.953699] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.954056] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.954377] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.954592] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.954787] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.957271] env[68233]: INFO nova.compute.manager [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Terminating instance [ 911.976228] env[68233]: DEBUG nova.policy [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 912.081732] env[68233]: DEBUG nova.network.neutron [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.191768] env[68233]: INFO nova.compute.manager [-] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Took 1.25 seconds to deallocate network for instance. [ 912.203838] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Completed reading data from the image iterator. {{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 912.204131] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 912.205209] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c36daa-16e6-405b-b9ea-a38f0844107b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.222308] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 912.222433] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 912.223066] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-4a4ca949-da2f-428e-b3f8-d945c829038b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.284098] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.284255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.284400] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.381793] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.464139] env[68233]: DEBUG nova.compute.manager [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 912.464476] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.467590] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1b2a88-3d11-403f-934d-9a2b514d6768 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.472102] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3efbf61-0aa9-4c46-bebf-241d81d324e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.481015] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.483075] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef8411f7-5f26-4c15-9a7f-753b2cd70ad9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.485705] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3670efa4-cfc2-4112-9f6c-6e32fed7b09f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.493738] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 912.493738] env[68233]: value = "task-2782580" [ 912.493738] env[68233]: _type = "Task" [ 912.493738] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.526100] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af07f46d-37c5-40ec-bfae-8683b90e0497 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.539517] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782580, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.541991] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572a90ee-1bc5-4dbb-a835-c0958518a230 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.558534] env[68233]: DEBUG nova.compute.provider_tree [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.586218] env[68233]: INFO nova.compute.manager [-] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Took 1.47 seconds to deallocate network for instance. [ 912.595055] env[68233]: DEBUG oslo_vmware.rw_handles [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fd7783-2816-3d13-febc-11ec45643c25/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 912.595055] env[68233]: INFO nova.virt.vmwareapi.images [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Downloaded image file data fa91d76b-2020-4abf-8837-92f1504eee4f [ 912.595948] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7c3cec-5f46-4c8b-9426-4a88c6e585fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.611466] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ce85a0d-a008-4db2-b77e-aed2499a915d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.639020] env[68233]: INFO nova.virt.vmwareapi.images [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] The imported VM was unregistered [ 912.639020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 912.639020] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.640030] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-657120d7-dc2c-4806-a591-fa68074ad1a1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.655790] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 
tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.655790] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e/OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e.vmdk to [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk. {{(pid=68233) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 912.655990] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ec8b9cc9-f7d8-4b5f-91c7-54d4d73fbbf6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.664387] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 912.664387] env[68233]: value = "task-2782582" [ 912.664387] env[68233]: _type = "Task" [ 912.664387] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.672394] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.716330] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.857356] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.860985] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Successfully created port: e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.955342] env[68233]: DEBUG nova.network.neutron [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.035017] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782580, 'name': PowerOffVM_Task, 'duration_secs': 0.19607} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.035324] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.035498] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.035788] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdcb8970-62f1-48fe-82b0-64f21f227412 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.062431] env[68233]: DEBUG nova.scheduler.client.report [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 913.097880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.107078] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Unregistered the VM 
{{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.107478] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.107688] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleting the datastore file [datastore2] 3cca16e1-3363-4026-9359-4ed2ba41e25d {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.107984] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3eb1b6bb-048f-40f3-b96a-3c8ddf89f3fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.115685] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 913.115685] env[68233]: value = "task-2782584" [ 913.115685] env[68233]: _type = "Task" [ 913.115685] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.125400] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782584, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.179186] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.180978] env[68233]: DEBUG nova.network.neutron [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Updating instance_info_cache with network_info: [{"id": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "address": "fa:16:3e:4e:77:ea", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008c3f5c-f8", "ovs_interfaceid": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.396019] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.418196] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 913.418524] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.418634] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 913.418816] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.418960] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 913.419125] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 913.419351] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 913.419547] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 913.419723] 
env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 913.419890] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 913.420078] env[68233]: DEBUG nova.virt.hardware [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 913.420981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b823e873-068c-4c81-90f7-ec749e4b319e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.429352] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca36799-f0b5-4f1a-9c19-4b5b9878c56f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.460959] env[68233]: INFO nova.compute.manager [-] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Took 1.67 seconds to deallocate network for instance. [ 913.567863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.196s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.570883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.675s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.572874] env[68233]: INFO nova.compute.claims [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.595618] env[68233]: INFO nova.scheduler.client.report [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Deleted allocations for instance 22c06baf-6316-4531-8037-b8b77c401596 [ 913.602620] env[68233]: DEBUG nova.compute.manager [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Received event network-vif-deleted-80978e6e-5b93-41d0-98a9-b2ca242940e5 {{(pid=68233) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 913.602620] env[68233]: DEBUG nova.compute.manager [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Received event network-changed-008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 913.602620] env[68233]: DEBUG nova.compute.manager [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Refreshing instance network info cache due to event network-changed-008c3f5c-f83a-4833-99e9-7aa70aff0c0b. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 913.602791] env[68233]: DEBUG oslo_concurrency.lockutils [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] Acquiring lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.627859] env[68233]: DEBUG oslo_vmware.api [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782584, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20439} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.628172] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.628359] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.628537] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.628713] env[68233]: INFO nova.compute.manager [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 913.628958] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.629460] env[68233]: DEBUG nova.compute.manager [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 913.629563] env[68233]: DEBUG nova.network.neutron [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.675616] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.682720] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.683120] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Instance network_info: |[{"id": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "address": "fa:16:3e:4e:77:ea", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008c3f5c-f8", "ovs_interfaceid": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.683471] env[68233]: DEBUG oslo_concurrency.lockutils [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] Acquired lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.683768] env[68233]: DEBUG nova.network.neutron [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Refreshing network info cache for port 008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 913.685811] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:77:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '008c3f5c-f83a-4833-99e9-7aa70aff0c0b', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.695393] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.699129] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.699129] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a382b9b-7036-4f0b-8609-ab6de8722106 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.720175] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.720175] env[68233]: value = "task-2782585" [ 913.720175] env[68233]: _type = "Task" [ 913.720175] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.728956] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782585, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.968041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.107502] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b8a2fcd4-9055-40b7-b33f-b22040cb7208 tempest-ImagesOneServerNegativeTestJSON-673958444 tempest-ImagesOneServerNegativeTestJSON-673958444-project-member] Lock "22c06baf-6316-4531-8037-b8b77c401596" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.766s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.119289] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.119568] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.119894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.120307] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.120519] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.122945] env[68233]: INFO nova.compute.manager [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 
8880bb83-56f1-4ad2-9d6d-1885826aed21] Terminating instance [ 914.176253] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.236027] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782585, 'name': CreateVM_Task, 'duration_secs': 0.415823} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.237415] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.238231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.238451] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.238904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.239537] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf409249-1e28-4493-81db-073a496126e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.244884] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 914.244884] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263cbe8-5ca7-82cf-ef40-8cca8a6e6c47" [ 914.244884] env[68233]: _type = "Task" [ 914.244884] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.258021] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263cbe8-5ca7-82cf-ef40-8cca8a6e6c47, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.551955] env[68233]: DEBUG nova.network.neutron [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Updated VIF entry in instance network info cache for port 008c3f5c-f83a-4833-99e9-7aa70aff0c0b. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.552537] env[68233]: DEBUG nova.network.neutron [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Updating instance_info_cache with network_info: [{"id": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "address": "fa:16:3e:4e:77:ea", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008c3f5c-f8", "ovs_interfaceid": "008c3f5c-f83a-4833-99e9-7aa70aff0c0b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.623789] env[68233]: DEBUG nova.network.neutron [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.627724] env[68233]: DEBUG nova.compute.manager [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 914.628197] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 914.629292] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936df5c7-90c4-4a35-8f91-2c59f3878095 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.644555] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.644555] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89debc42-c501-462b-a2ca-638b5a974b37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.653226] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 914.653226] env[68233]: value = "task-2782586" [ 914.653226] env[68233]: _type = "Task" [ 914.653226] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.675109] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.682140] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.756698] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263cbe8-5ca7-82cf-ef40-8cca8a6e6c47, 'name': SearchDatastore_Task, 'duration_secs': 0.080979} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.757186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.757513] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.759533] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.759533] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.759533] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.759533] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7133d32-490e-4f8f-b38d-fc67dc3a8896 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.767668] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Successfully updated port: e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.772030] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.772300] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.773208] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7c088d1-7bd6-49b1-a526-6130c36161e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.779521] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 914.779521] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528254b3-3eae-c418-ae3d-05686d7c0a3b" [ 914.779521] env[68233]: _type = "Task" [ 914.779521] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.792510] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528254b3-3eae-c418-ae3d-05686d7c0a3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.057226] env[68233]: DEBUG oslo_concurrency.lockutils [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] Releasing lock "refresh_cache-3f79709a-b8b7-4838-8731-d051155ff4f3" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.057493] env[68233]: DEBUG nova.compute.manager [req-da67be6d-9dff-4c05-96d2-fa9e46c0bce2 req-9ea58e01-bd85-42df-ab47-48c9acab2356 service nova] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Received event network-vif-deleted-749b7b47-864a-4c70-804b-9e57cc1b14a5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.066480] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-591c71f9-04c6-4d95-aa3e-1d7c3d15a42e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.074331] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b51f2ae-9763-442d-81ab-5725333e08ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.110718] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9750950b-d456-4d74-92f3-36f42fdb165f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.118374] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325d664e-4382-4393-bb43-c0cbdf812fca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.132652] env[68233]: INFO nova.compute.manager [-] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Took 1.50 seconds to deallocate network for instance. 
[ 915.133274] env[68233]: DEBUG nova.compute.provider_tree [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.167879] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782586, 'name': PowerOffVM_Task, 'duration_secs': 0.396427} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.168251] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.168421] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.171790] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33bbaf7b-3523-4611-907b-e5fa7f44585f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.179246] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782582, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.438752} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.179477] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e/OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e.vmdk to [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk. 
[ 915.179663] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Cleaning up location [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 915.179825] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_28d6fd48-91c6-4d62-a204-ac15b08eb16e {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.180060] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f04bb103-3bfe-42bd-991b-4c7b19a21275 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.186534] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 915.186534] env[68233]: value = "task-2782588" [ 915.186534] env[68233]: _type = "Task" [ 915.186534] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.195138] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.238736] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.238972] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.239221] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleting the datastore file [datastore2] 8880bb83-56f1-4ad2-9d6d-1885826aed21 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.239431] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dfb65b4f-7d0b-4aa2-a06a-03337beb1382 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.246353] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 915.246353] env[68233]: value = "task-2782589" [ 915.246353] env[68233]: _type = "Task" 
[ 915.246353] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.254862] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.271670] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.271670] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.271670] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.290258] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528254b3-3eae-c418-ae3d-05686d7c0a3b, 'name': SearchDatastore_Task, 'duration_secs': 0.091585} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.291123] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20a0e298-89f1-4a0a-9184-951bc3713bfb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.296797] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 915.296797] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524ed39a-29f4-e7f4-c4bc-1ccf4216f4ca" [ 915.296797] env[68233]: _type = "Task" [ 915.296797] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.305098] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524ed39a-29f4-e7f4-c4bc-1ccf4216f4ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.631881] env[68233]: DEBUG nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Received event network-vif-deleted-f3dc9bf3-0d50-44df-89a6-52cfc6899cad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.632423] env[68233]: DEBUG nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-vif-plugged-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.632658] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.632901] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.633154] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.633287] env[68233]: DEBUG nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] No waiting events found dispatching network-vif-plugged-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 915.633466] env[68233]: WARNING nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received unexpected event network-vif-plugged-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de for instance with vm_state building and task_state spawning. [ 915.633635] env[68233]: DEBUG nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-changed-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 915.633805] env[68233]: DEBUG nova.compute.manager [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing instance network info cache due to event network-changed-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 915.633984] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.637209] env[68233]: DEBUG nova.scheduler.client.report [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.644031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.696627] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053469} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.696926] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 915.697134] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.697381] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk to [datastore1] 1207585c-fb2a-43b7-aec2-c3a7889255a5/1207585c-fb2a-43b7-aec2-c3a7889255a5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.697640] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ebecff32-007b-4c6d-9b85-e6736a7bc19b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.705296] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 915.705296] env[68233]: value = "task-2782590" [ 915.705296] env[68233]: _type = "Task" [ 915.705296] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.713537] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.758958] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.802793] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.808776] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524ed39a-29f4-e7f4-c4bc-1ccf4216f4ca, 'name': SearchDatastore_Task, 'duration_secs': 0.060643} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.809271] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.809620] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3f79709a-b8b7-4838-8731-d051155ff4f3/3f79709a-b8b7-4838-8731-d051155ff4f3.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.809908] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b614315-0a3e-468c-b6b0-097ba736d7e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.816169] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 915.816169] env[68233]: value = "task-2782591" [ 915.816169] env[68233]: _type = "Task" [ 915.816169] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.824278] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782591, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.961541] env[68233]: DEBUG nova.network.neutron [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.146012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.146012] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 916.147679] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.727s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.149367] env[68233]: INFO nova.compute.claims [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 916.218335] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.258223] env[68233]: DEBUG oslo_vmware.api [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.596526} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.258616] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.258870] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.259147] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.259461] env[68233]: INFO nova.compute.manager [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Took 1.63 seconds to destroy the instance on the hypervisor. [ 916.259774] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.260104] env[68233]: DEBUG nova.compute.manager [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.260203] env[68233]: DEBUG nova.network.neutron [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.330306] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782591, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.467015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.467399] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Instance network_info: |[{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.467722] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.467905] env[68233]: DEBUG nova.network.neutron [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing network info cache for port e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.469219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:76:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.481681] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 
tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating folder: Project (74638e02258142a1a5170178faabb0ca). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.482945] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4803892c-1de8-4e94-be75-cf40b3971170 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.498389] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created folder: Project (74638e02258142a1a5170178faabb0ca) in parent group-v559223. [ 916.498694] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating folder: Instances. Parent ref: group-v559431. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 916.498855] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9bdca72-be7f-4c90-b699-3985b35616af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.513250] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created folder: Instances in parent group-v559431. [ 916.513626] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.513717] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.513921] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a556871-5e46-489b-9d41-73c7eb06e797 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.538295] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.538295] env[68233]: value = "task-2782594" [ 916.538295] env[68233]: _type = "Task" [ 916.538295] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.551258] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782594, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.657573] env[68233]: DEBUG nova.compute.utils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.663070] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.663070] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.705822] env[68233]: DEBUG nova.policy [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '309fbc1ccdf44918a272d8cd64c63af0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a69000592d412587562d2d0f890515', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.723278] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.835326] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782591, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.054192] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782594, 'name': CreateVM_Task, 'duration_secs': 0.429415} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.054428] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.055252] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.055423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.055842] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.056232] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a121092-3d85-448f-83d3-32abcc797610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.067042] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 917.067042] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fd8cb-1a0a-8eea-1a51-83c514c43823" [ 917.067042] env[68233]: _type = "Task" [ 917.067042] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.081401] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fd8cb-1a0a-8eea-1a51-83c514c43823, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.112534] env[68233]: DEBUG nova.network.neutron [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.163855] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 917.234069] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.318961] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Successfully created port: b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.338867] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782591, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.243729} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.342320] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 3f79709a-b8b7-4838-8731-d051155ff4f3/3f79709a-b8b7-4838-8731-d051155ff4f3.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.342660] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.345604] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d5f1d66-0132-448f-a13e-984c3f7a3b18 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.358460] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 917.358460] env[68233]: value = "task-2782595" [ 917.358460] env[68233]: _type = "Task" [ 917.358460] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.375429] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.583142] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fd8cb-1a0a-8eea-1a51-83c514c43823, 'name': SearchDatastore_Task, 'duration_secs': 0.093643} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.583435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.583696] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.584999] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.584999] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.584999] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.585175] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6a3e8d0-e45a-4476-b4a3-93f2741550b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.607978] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.608231] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.609152] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a405c80e-8d16-46f5-b601-a8488f9acf53 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.611980] env[68233]: INFO nova.compute.manager [-] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Took 1.35 seconds to deallocate network for instance. [ 917.618990] env[68233]: DEBUG nova.network.neutron [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updated VIF entry in instance network info cache for port e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.619397] env[68233]: DEBUG nova.network.neutron [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.622394] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 917.622394] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216ace8-da9b-9dcb-4e19-85c7bf6e681a" [ 917.622394] env[68233]: _type = "Task" [ 917.622394] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.638681] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216ace8-da9b-9dcb-4e19-85c7bf6e681a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.704757] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8721cd1b-e82b-4129-b35f-e45a3bb4ac8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.726465] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fad0137-944d-4b10-b070-f5e0acc6c5f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.729492] env[68233]: DEBUG nova.compute.manager [req-ef4c3349-82d7-464b-8363-d84958ae2a07 req-53c111dc-c102-42fe-aceb-dfba88ddad1b service nova] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Received event network-vif-deleted-d6e53ccd-3915-433a-a12d-2a02f08a9ab2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 917.764231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b30aa4-28d2-4100-a2a0-84fb7a93b732 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.767289] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.780026] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f4fa72-2f98-4234-a0fa-e17363d79201 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.800111] env[68233]: DEBUG nova.compute.provider_tree [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.874124] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074265} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.874719] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.876107] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b2e1d6-7e2b-460d-8b00-cd425b5a7064 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.913451] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 3f79709a-b8b7-4838-8731-d051155ff4f3/3f79709a-b8b7-4838-8731-d051155ff4f3.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.913451] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a71a57f-08ef-47a3-b89b-1db8d8b8bcc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.935654] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 917.935654] env[68233]: value = "task-2782596" [ 917.935654] env[68233]: _type = "Task" [ 917.935654] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.949222] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782596, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.118981] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.124024] env[68233]: DEBUG oslo_concurrency.lockutils [req-9202dc27-f63f-404e-a38c-33698178ad98 req-ee9df561-6c42-4ff4-aae8-f8502c539d74 service nova] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.139260] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216ace8-da9b-9dcb-4e19-85c7bf6e681a, 'name': SearchDatastore_Task, 'duration_secs': 0.093449} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.140533] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f7a38ab-272e-46a8-8995-51fc25d96fbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.151244] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 918.151244] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52851cef-3a72-18f9-ae3d-4a51e5fcfdb7" [ 918.151244] env[68233]: _type = "Task" [ 918.151244] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.164633] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52851cef-3a72-18f9-ae3d-4a51e5fcfdb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.182376] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 918.218984] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 918.219171] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 918.219234] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 918.219440] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de 
tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 918.219596] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 918.219744] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 918.219952] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 918.220124] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 918.220294] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 918.220462] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 918.220636] env[68233]: DEBUG nova.virt.hardware [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 918.221831] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33c39cb-a4eb-42ad-aa53-dc8a43895c42 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.235620] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 74%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.239899] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8572c4e2-57c7-4660-955d-bf50d459e203 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.305025] env[68233]: DEBUG nova.scheduler.client.report [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 918.451099] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782596, 'name': ReconfigVM_Task, 'duration_secs': 0.35471} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.451483] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 3f79709a-b8b7-4838-8731-d051155ff4f3/3f79709a-b8b7-4838-8731-d051155ff4f3.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 918.452225] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-420def6c-b81f-4d4c-ba09-60e263d16b16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.463294] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 918.463294] env[68233]: value = "task-2782597" [ 918.463294] env[68233]: _type = "Task" [ 918.463294] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.477813] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782597, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.674122] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52851cef-3a72-18f9-ae3d-4a51e5fcfdb7, 'name': SearchDatastore_Task, 'duration_secs': 0.094437} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.679765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.679765] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 73ca71c0-34cd-4393-82ff-4b297d350209/73ca71c0-34cd-4393-82ff-4b297d350209.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.679765] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d708ca4-a320-4bcb-b2b3-51c9733b97c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.693442] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 918.693442] env[68233]: value = "task-2782598" [ 918.693442] env[68233]: _type = "Task" [ 918.693442] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.715497] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782598, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.736349] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.811528] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.811528] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.814183] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.573s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.814644] env[68233]: DEBUG nova.objects.instance [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lazy-loading 'resources' on Instance uuid 3d759f4f-3845-4bb5-8cfa-639b7023bb27 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.977372] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782597, 'name': Rename_Task, 'duration_secs': 0.163789} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.977751] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.978154] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d66e21e-f6da-4b96-9013-36b2b0653583 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.991151] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 918.991151] env[68233]: value = "task-2782599" [ 918.991151] env[68233]: _type = "Task" [ 918.991151] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.003731] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782599, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.207724] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782598, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.233955] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782590, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.103307} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.234355] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fa91d76b-2020-4abf-8837-92f1504eee4f/fa91d76b-2020-4abf-8837-92f1504eee4f.vmdk to [datastore1] 1207585c-fb2a-43b7-aec2-c3a7889255a5/1207585c-fb2a-43b7-aec2-c3a7889255a5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.235237] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d17d72-2f23-4786-b13e-cd0b3aae8337 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.266675] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 1207585c-fb2a-43b7-aec2-c3a7889255a5/1207585c-fb2a-43b7-aec2-c3a7889255a5.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 919.270022] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-809c71d2-f954-440a-8c2d-de7a1dd51547 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.286300] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 919.286300] env[68233]: value = "task-2782600" [ 919.286300] env[68233]: _type = "Task" [ 919.286300] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.295018] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.315774] env[68233]: DEBUG nova.compute.utils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 919.317498] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 919.317798] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.503836] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782599, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.541831] env[68233]: DEBUG nova.policy [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6f4dee9aa3640bd88dbc66a497ee3d8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '978c6dbf1c10443da3253a58f1e5bdea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.708713] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782598, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542094} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.709108] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 73ca71c0-34cd-4393-82ff-4b297d350209/73ca71c0-34cd-4393-82ff-4b297d350209.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.709378] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.709683] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97be62a8-0f78-4fc1-bd8b-410ef14d9f97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.720040] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 919.720040] env[68233]: value = "task-2782601" [ 919.720040] env[68233]: _type = "Task" [ 919.720040] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.735791] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.799740] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782600, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.825130] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.837089] env[68233]: DEBUG nova.compute.manager [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Received event network-vif-plugged-b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 919.837329] env[68233]: DEBUG oslo_concurrency.lockutils [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.837583] env[68233]: DEBUG oslo_concurrency.lockutils [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] Lock "827711ac-ef52-41a0-9029-0a1805522a08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.837703] env[68233]: DEBUG oslo_concurrency.lockutils [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] Lock "827711ac-ef52-41a0-9029-0a1805522a08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.837867] env[68233]: DEBUG nova.compute.manager [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] No waiting events found dispatching network-vif-plugged-b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 919.838041] env[68233]: WARNING nova.compute.manager [req-f2867424-5def-4d96-be5b-60fafeebe416 req-164794a9-1bc2-4fab-94aa-b7f56599bf2f service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Received unexpected event network-vif-plugged-b710ae65-1e11-4b1c-8389-3094fbf99637 for instance with vm_state building and task_state spawning. 
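The network-vif-plugged handling logged just above follows the prepare/wait/pop pattern nova uses for external instance events: the thread building the instance registers the events it expects before the action that produces them, and the handler invoked on Neutron's notification pops the matching waiter or, as here, finds none ("No waiting events found dispatching") and downgrades the case to the "Received unexpected event" warning. The following is only a minimal sketch of that pattern in plain Python, with hypothetical class and function names rather than nova's actual code:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy version of the prepare/wait/pop pattern used for
        external instance events such as network-vif-plugged."""

        def __init__(self):
            self._lock = threading.Lock()
            # {instance_uuid: {event_name: threading.Event}}
            self._waiters = defaultdict(dict)

        def prepare_for_event(self, instance_uuid, event_name):
            # Called by the spawning thread *before* the action that
            # will eventually produce the event (e.g. plugging a VIF).
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_event(self, instance_uuid, event_name):
            # Called from the external-event handler; returns the waiter
            # if the spawning thread registered one, else None.
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def handle_external_event(events, instance_uuid, event_name):
        waiter = events.pop_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the "No waiting events found dispatching ..."
            # debug line followed by the "Received unexpected event" warning.
            print("WARNING: unexpected event %s for %s"
                  % (event_name, instance_uuid))
        else:
            waiter.set()  # wakes the thread blocked in ev.wait(timeout)

The warning in the log corresponds to the branch where the notification arrives before (or without) a registered waiter, which can happen while the instance is still in the building/spawning state.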
[ 919.881444] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa94bff-8174-44c4-a3af-0420573f2979 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.889660] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca1c63e6-47a1-4e85-b5ff-0e5c9c706beb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.922802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0246a219-25ee-4245-87e3-c382ae3bd5fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.931744] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac03de3-2acd-40aa-adea-a026605c7d65 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.950132] env[68233]: DEBUG nova.compute.provider_tree [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.007226] env[68233]: DEBUG oslo_vmware.api [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782599, 'name': PowerOnVM_Task, 'duration_secs': 0.517355} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.007751] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 920.008101] env[68233]: INFO nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Took 9.31 seconds to spawn the instance on the hypervisor. [ 920.011018] env[68233]: DEBUG nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 920.011018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb20932-6737-454b-a324-c9f658921f4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.229675] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.281495} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.233020] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.233020] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Successfully created port: e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.234043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e067217-ca23-4bbe-8480-12c4d2f1ea6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.259566] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 73ca71c0-34cd-4393-82ff-4b297d350209/73ca71c0-34cd-4393-82ff-4b297d350209.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.260057] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d623552-86e8-4f46-a912-90231a1300be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.282017] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 920.282017] env[68233]: value = "task-2782602" [ 920.282017] env[68233]: _type = "Task" [ 920.282017] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.290672] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782602, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.299376] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782600, 'name': ReconfigVM_Task, 'duration_secs': 0.74745} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.299376] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 1207585c-fb2a-43b7-aec2-c3a7889255a5/1207585c-fb2a-43b7-aec2-c3a7889255a5.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 920.299761] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1ddf6140-d99a-4d93-b604-493d3a720024 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.308018] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 920.308018] env[68233]: value = "task-2782603" [ 920.308018] env[68233]: _type = "Task" [ 920.308018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.315638] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782603, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.379323] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Successfully updated port: b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.455085] env[68233]: DEBUG nova.scheduler.client.report [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.479954] env[68233]: DEBUG nova.compute.manager [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Received event network-changed-b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 920.481751] env[68233]: DEBUG nova.compute.manager [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Refreshing instance network info cache due to event network-changed-b710ae65-1e11-4b1c-8389-3094fbf99637. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 920.481903] env[68233]: DEBUG oslo_concurrency.lockutils [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] Acquiring lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.482137] env[68233]: DEBUG oslo_concurrency.lockutils [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] Acquired lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.482324] env[68233]: DEBUG nova.network.neutron [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Refreshing network info cache for port b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 920.528155] env[68233]: INFO nova.compute.manager [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Took 37.71 seconds to build instance. [ 920.794121] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782602, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.820644] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782603, 'name': Rename_Task, 'duration_secs': 0.418053} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.820644] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.820644] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44f72363-1223-4c87-89c8-928b3271db22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.827467] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 920.827467] env[68233]: value = "task-2782604" [ 920.827467] env[68233]: _type = "Task" [ 920.827467] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.838086] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.840274] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782604, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.868943] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 920.869701] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 920.869903] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 920.870121] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 920.870275] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 920.870425] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 920.870859] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 920.871079] 
env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 920.871263] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 920.871466] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 920.871654] env[68233]: DEBUG nova.virt.hardware [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 920.872552] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40666fc3-4ced-4a6e-92ed-36d6c2b115b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.880510] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6028e2df-1581-4571-90f7-621c999fbde5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.885278] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.960806] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.147s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.964111] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.098s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.965521] env[68233]: DEBUG nova.objects.instance [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'resources' on Instance uuid c5c8bf0c-eb58-41bc-a316-b4ac78187658 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 921.002421] env[68233]: INFO nova.scheduler.client.report [None 
req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Deleted allocations for instance 3d759f4f-3845-4bb5-8cfa-639b7023bb27 [ 921.030883] env[68233]: DEBUG nova.network.neutron [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.031773] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be28ca8b-ca78-4eeb-994e-9abcb73b1552 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.219s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.184754] env[68233]: DEBUG nova.network.neutron [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.293516] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782602, 'name': ReconfigVM_Task, 'duration_secs': 0.758209} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.293812] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 73ca71c0-34cd-4393-82ff-4b297d350209/73ca71c0-34cd-4393-82ff-4b297d350209.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.294459] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51f66736-f131-472f-8e7f-f8856f8e4dee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.301384] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 921.301384] env[68233]: value = "task-2782605" [ 921.301384] env[68233]: _type = "Task" [ 921.301384] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.311873] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782605, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.340735] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782604, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.521193] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2b4e1d3-e7b0-4f20-b3b3-14155c8c7a17 tempest-ServerRescueTestJSONUnderV235-1978426536 tempest-ServerRescueTestJSONUnderV235-1978426536-project-member] Lock "3d759f4f-3845-4bb5-8cfa-639b7023bb27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.772s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.565826] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "876d428d-d5c9-422a-aba2-2d6c61b092db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.566096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.568423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.568423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.568423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.569969] env[68233]: INFO nova.compute.manager [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Terminating instance [ 921.690727] env[68233]: DEBUG oslo_concurrency.lockutils [req-b47f0acd-06f7-4a96-a1c6-ab6dc9595beb req-91d055eb-a3f0-4b2f-aad5-29e2e02062d1 service 
nova] Releasing lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.691188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.691679] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.822448] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782605, 'name': Rename_Task, 'duration_secs': 0.191062} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.823180] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.823556] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-614c9dc2-f0e6-4209-954a-4c554b0bc17e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.834067] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 921.834067] env[68233]: value = "task-2782606" [ 921.834067] env[68233]: _type = "Task" [ 921.834067] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.842724] env[68233]: DEBUG oslo_vmware.api [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782604, 'name': PowerOnVM_Task, 'duration_secs': 0.532537} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.847331] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.847864] env[68233]: INFO nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Took 18.05 seconds to spawn the instance on the hypervisor. 
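The Rename_Task and PowerOnVM_Task entries above follow oslo.vmware's invoke-then-poll pattern: the driver submits a vSphere task and wait_for_task() polls it (the "progress is N%" lines) until it reports success. Below is a minimal sketch of that pattern against the public oslo.vmware API; the vCenter host, credentials and the managed object reference are placeholders rather than values from this log, and a reachable vCenter is needed to actually run it.

# Sketch of the invoke-then-poll pattern behind the PowerOnVM_Task entries.
# Host, credentials and the moref value below are illustrative placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test',                  # placeholder vCenter endpoint
    'administrator@vsphere.local',      # placeholder username
    'secret',                           # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)             # cadence of the "progress is N%" polling

# A VirtualMachine moref would normally come from a PropertyCollector query
# (the RetrievePropertiesEx calls in the log); 'vm-12345' is made up.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)  # blocks, polling until the task ends
print(task_info.state)                   # 'success' once the power-on completed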
[ 921.848284] env[68233]: DEBUG nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.849796] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782606, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.854672] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fb1369-6653-425a-8977-90d8e58404c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.055809] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7615ee3e-445f-4844-aa01-c4239a9e182c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.065552] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a730a051-f5b0-485c-aca2-84ddfb309971 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.099118] env[68233]: DEBUG nova.compute.manager [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 922.099233] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 922.100851] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-858402c9-70ff-414c-afeb-14d99638c740 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.103901] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480352b8-cd4b-4e25-911f-3b3ae8eae3fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.111093] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 922.114159] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a663c79c-c25e-4e70-938a-650e91de5180 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.117268] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95865508-15e9-4f70-8722-a847f25e1a87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.136703] env[68233]: DEBUG nova.compute.provider_tree [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.140301] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Successfully updated port: e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 922.142957] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 922.142957] env[68233]: value = "task-2782607" [ 922.142957] env[68233]: _type = "Task" [ 922.142957] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.153528] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782607, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.231179] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 922.319444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "0b1065c2-7923-4dc4-a64f-be72a7994472" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.319704] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.345321] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782606, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.375507] env[68233]: INFO nova.compute.manager [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Took 45.34 seconds to build instance. 
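The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines and the "acquired by ... waited N.NNNs" / "held N.NNNs" lines in these entries come from oslo.concurrency's lockutils: the context-manager form logs acquire/release, while the synchronized decorator additionally reports how long the caller waited for and then held the named lock. A short sketch of both forms follows, using an illustrative lock name alongside the "refresh_cache-<uuid>" style seen above.

# Sketch of the two oslo.concurrency locking forms behind the entries above.
# 'demo-resource' is an illustrative lock name, not one taken from this log.
from oslo_concurrency import lockutils

def refresh_cache(instance_uuid):
    # Context-manager form: produces the "Acquiring"/"Acquired"/"Releasing" lines.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance's network info cache here

@lockutils.synchronized('demo-resource')
def update_usage():
    # Decorator form: produces "acquired by ... waited N.NNNs" and "held N.NNNs".
    pass

refresh_cache('00000000-0000-0000-0000-000000000000')
update_usage()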
[ 922.519353] env[68233]: DEBUG nova.compute.manager [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Received event network-vif-plugged-e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 922.519353] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Acquiring lock "32e05800-e812-412a-b049-89178737cffd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.519353] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Lock "32e05800-e812-412a-b049-89178737cffd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.520071] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Lock "32e05800-e812-412a-b049-89178737cffd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.520071] env[68233]: DEBUG nova.compute.manager [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] No waiting events found dispatching network-vif-plugged-e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 922.520255] env[68233]: WARNING nova.compute.manager [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Received unexpected event network-vif-plugged-e82a5466-7521-4b0f-83e9-3856ecb008f3 for instance with vm_state building and task_state spawning. [ 922.520307] env[68233]: DEBUG nova.compute.manager [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Received event network-changed-e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 922.520505] env[68233]: DEBUG nova.compute.manager [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Refreshing instance network info cache due to event network-changed-e82a5466-7521-4b0f-83e9-3856ecb008f3. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 922.520643] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Acquiring lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.520785] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Acquired lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.520938] env[68233]: DEBUG nova.network.neutron [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Refreshing network info cache for port e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 922.526120] env[68233]: DEBUG nova.network.neutron [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updating instance_info_cache with network_info: [{"id": "b710ae65-1e11-4b1c-8389-3094fbf99637", "address": "fa:16:3e:3c:17:d7", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710ae65-1e", "ovs_interfaceid": "b710ae65-1e11-4b1c-8389-3094fbf99637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.645890] env[68233]: DEBUG nova.scheduler.client.report [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.652033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 
tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.664440] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782607, 'name': PowerOffVM_Task, 'duration_secs': 0.214735} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.664728] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 922.664891] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 922.665162] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-71aadae0-a177-4bab-9504-12006bd573e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.739071] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.739977] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.739977] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Deleting the datastore file [datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.740124] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-344e70ad-a71e-4729-bbd7-272340919258 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.747056] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for the task: (returnval){ [ 922.747056] env[68233]: value = "task-2782612" [ 922.747056] env[68233]: _type = "Task" [ 922.747056] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.756626] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782612, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.822080] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 922.858137] env[68233]: DEBUG oslo_vmware.api [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782606, 'name': PowerOnVM_Task, 'duration_secs': 0.58802} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.858453] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.858669] env[68233]: INFO nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Took 9.46 seconds to spawn the instance on the hypervisor. 
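The destroy path for instance 876d428d-d5c9-422a-aba2-2d6c61b092db above runs PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task against datastore2. The sketch below only illustrates that call order with the public oslo.vmware API; it is not Nova's vmops code, the session setup and morefs are placeholders, and only the datastore path is taken from the log entries above.

# Illustrative power-off -> unregister -> delete-files sequence matching the
# teardown entries above. Session details and morefs are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref
dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')   # placeholder moref

# 1. Power the VM off and wait for the task to complete.
session.wait_for_task(
    session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))

# 2. Unregister the VM from the vCenter inventory (a plain call, not a task).
session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

# 3. Delete the instance directory on the datastore via the FileManager,
#    mirroring the "[datastore2] 876d428d-..." file_delete entry in the log.
file_manager = session.vim.service_content.fileManager
session.wait_for_task(
    session.invoke_api(session.vim, 'DeleteDatastoreFile_Task', file_manager,
                       name='[datastore2] 876d428d-d5c9-422a-aba2-2d6c61b092db',
                       datacenter=dc_ref))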
[ 922.858877] env[68233]: DEBUG nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.859849] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba39578-1383-44b9-a9e5-8531fe48be48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.878214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-01112438-95c3-4db4-8c58-96f157cbe082 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.851s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.028807] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.029177] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance network_info: |[{"id": "b710ae65-1e11-4b1c-8389-3094fbf99637", "address": "fa:16:3e:3c:17:d7", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710ae65-1e", "ovs_interfaceid": "b710ae65-1e11-4b1c-8389-3094fbf99637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.029557] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:17:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'b710ae65-1e11-4b1c-8389-3094fbf99637', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.037098] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Creating folder: Project (61a69000592d412587562d2d0f890515). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.037553] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1747364f-76b0-4a71-acee-32d47badf74c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.048233] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Created folder: Project (61a69000592d412587562d2d0f890515) in parent group-v559223. [ 923.048326] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Creating folder: Instances. Parent ref: group-v559437. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 923.048559] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a9cf72c-1acf-4e11-9004-0bb527d38ad7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.055147] env[68233]: DEBUG nova.network.neutron [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.058161] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Created folder: Instances in parent group-v559437. [ 923.058391] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.058609] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.058869] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42449cff-04d8-417c-84f4-48f2c9bf9c9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.073444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.073669] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.073863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.074051] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.074221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.076309] env[68233]: INFO nova.compute.manager [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Terminating instance [ 923.080194] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.080194] env[68233]: value = "task-2782615" [ 923.080194] env[68233]: _type = "Task" [ 923.080194] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.088609] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782615, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.125905] env[68233]: DEBUG nova.network.neutron [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.158134] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.160611] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.329s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.160859] env[68233]: DEBUG nova.objects.instance [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lazy-loading 'resources' on Instance uuid abdf9de2-8563-4a31-91a3-0c18b0387533 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.194310] env[68233]: INFO nova.scheduler.client.report [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance c5c8bf0c-eb58-41bc-a316-b4ac78187658 [ 923.258702] env[68233]: DEBUG oslo_vmware.api [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Task: {'id': task-2782612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137041} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.259327] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 923.260023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 923.260023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 923.260023] env[68233]: INFO nova.compute.manager [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Took 1.16 seconds to destroy the instance on the hypervisor. [ 923.260203] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.260670] env[68233]: DEBUG nova.compute.manager [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 923.260809] env[68233]: DEBUG nova.network.neutron [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 923.345072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.380086] env[68233]: INFO nova.compute.manager [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Took 35.64 seconds to build instance. [ 923.580413] env[68233]: DEBUG nova.compute.manager [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 923.580647] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 923.581568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c273a9-0741-4152-8cc7-ffab6d6a487a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.592317] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 923.595525] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-38a9dec3-b7a3-42ac-804d-cb689a2bd3ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.596988] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782615, 'name': CreateVM_Task, 'duration_secs': 0.502928} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 923.597171] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 923.598234] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.598576] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.598738] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 923.599351] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05ca55d9-d962-4979-9ca1-4d2201947558 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.602449] env[68233]: DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 923.602449] env[68233]: value = "task-2782616" [ 923.602449] 
env[68233]: _type = "Task" [ 923.602449] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.606683] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 923.606683] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca44de-697e-6e7e-07bc-9c5598595db1" [ 923.606683] env[68233]: _type = "Task" [ 923.606683] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.614173] env[68233]: DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782616, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.617246] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca44de-697e-6e7e-07bc-9c5598595db1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.629920] env[68233]: DEBUG oslo_concurrency.lockutils [req-f570fa22-361e-45f7-aa00-1cb9f0262311 req-30b8acac-3d24-4a62-a7c1-7b9a7a17c8fc service nova] Releasing lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.630343] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.630509] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.705989] env[68233]: DEBUG oslo_concurrency.lockutils [None req-563a849b-9594-4007-824b-c554bb1e0d11 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "c5c8bf0c-eb58-41bc-a316-b4ac78187658" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.237s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.883255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2f1b8242-b5c7-4621-a685-578a3684f8fa tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.148s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 924.113862] env[68233]: 
DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782616, 'name': PowerOffVM_Task, 'duration_secs': 0.371882} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.119289] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 924.119289] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 924.119867] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1d45eb4-40da-4a09-bfb2-ddef798117d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.127784] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ca44de-697e-6e7e-07bc-9c5598595db1, 'name': SearchDatastore_Task, 'duration_secs': 0.010219} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.127784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.127784] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 924.127784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.127784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.128182] env[68233]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 924.128932] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00a087ad-18be-45f2-afbe-1ae1923cf449 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.140406] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 924.141209] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 924.144644] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9423d5cf-9572-4be6-ae35-a96391e1c008 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.149406] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 924.149406] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5274b38a-010d-82d5-4904-4a6d10af75a5" [ 924.149406] env[68233]: _type = "Task" [ 924.149406] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.160535] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5274b38a-010d-82d5-4904-4a6d10af75a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.163348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974e00e8-7668-4142-a5c6-c6e425f10060 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.170314] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46765242-2352-4164-b3e2-2b1c0b11d244 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.202502] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.206180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3abe0911-348f-4773-9c89-9e74d7457a2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.210855] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 924.211159] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 924.211393] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore1] 1207585c-fb2a-43b7-aec2-c3a7889255a5 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.212230] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1aeb1fec-c272-4ed2-bb7d-0e3efdd00276 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.219785] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb17915-c688-4f0c-89af-f98d2d461b8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.228022] env[68233]: DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 924.228022] env[68233]: value = "task-2782618" [ 924.228022] env[68233]: _type = "Task" [ 924.228022] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.237120] env[68233]: DEBUG nova.compute.provider_tree [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.245682] env[68233]: DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782618, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.376721] env[68233]: DEBUG nova.network.neutron [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.455269] env[68233]: DEBUG nova.network.neutron [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Updating instance_info_cache with network_info: [{"id": "e82a5466-7521-4b0f-83e9-3856ecb008f3", "address": "fa:16:3e:20:94:60", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape82a5466-75", "ovs_interfaceid": "e82a5466-7521-4b0f-83e9-3856ecb008f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.550638] env[68233]: DEBUG nova.compute.manager [req-d0a6deab-6746-493c-bd97-abfbbb41d6be req-d7bf30eb-2b04-4ba8-8255-76b0eb5d2bdb service nova] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Received event network-vif-deleted-bc63fa79-ccc0-4ad9-b4df-185add5228eb {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 924.629346] env[68233]: DEBUG nova.compute.manager [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-changed-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 924.629554] env[68233]: DEBUG nova.compute.manager [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing instance network info cache due to event network-changed-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 924.629786] env[68233]: DEBUG oslo_concurrency.lockutils [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.630102] env[68233]: DEBUG oslo_concurrency.lockutils [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.630297] env[68233]: DEBUG nova.network.neutron [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing network info cache for port e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 924.661442] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5274b38a-010d-82d5-4904-4a6d10af75a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010354} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.665326] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf6e10ff-cf06-455a-9ea0-a0badfee9632 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.673669] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 924.673669] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52600b5a-adc2-d52d-e43e-de319799ae81" [ 924.673669] env[68233]: _type = "Task" [ 924.673669] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.687489] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52600b5a-adc2-d52d-e43e-de319799ae81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.734769] env[68233]: DEBUG oslo_vmware.api [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782618, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130814} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.735284] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.735284] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 924.735459] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 924.735854] env[68233]: INFO nova.compute.manager [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 924.735938] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.736214] env[68233]: DEBUG nova.compute.manager [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 924.736319] env[68233]: DEBUG nova.network.neutron [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 924.739890] env[68233]: DEBUG nova.scheduler.client.report [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.877820] env[68233]: INFO nova.compute.manager [-] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Took 1.62 seconds to deallocate network for instance. 
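The PowerOffVM_Task, UnregisterVM, and DeleteDatastoreFile_Task entries above all follow the same invoke-then-poll pattern from oslo.vmware. Below is a minimal sketch of that pattern; it assumes an already-created oslo_vmware.api.VMwareAPISession (the object whose wait_for_task/_poll_task calls produce the "Waiting for the task" and "progress is N%" lines here), and the power_off_vm helper name is illustrative rather than Nova's actual code:

    def power_off_vm(session, vm_ref):
        # invoke_api issues VirtualMachine.PowerOffVM_Task against the vim
        # service and returns a task reference ("task-NNNNNNN" in the log).
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task polls the task until it finishes (the "progress is 0%"
        # and "completed successfully" lines) and raises an oslo_vmware
        # exception if the task errors out or is cancelled.
        session.wait_for_task(task)

The same invoke/wait pairing underlies the directory creation, datastore search, disk copy, and file delete tasks logged in this section; only the invoked method name changes.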
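The "Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c" entry above echoes the inventory this compute node reports to Placement. As a rough illustration of what those numbers mean, the plain-Python sketch below (not Nova code; the capacity formula (total - reserved) * allocation_ratio and the effective_capacity helper are assumptions based on the usual Placement semantics) derives the schedulable capacity from the logged dict:

    # Inventory as logged by the report client above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1,
                      'max_unit': 16,    'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1,
                      'max_unit': 174,   'step_size': 1, 'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Assumed formula: capacity = (total - reserved) * allocation_ratio;
        # max_unit still caps what any single allocation (instance) may request.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

Under that reading the provider advertises 192 schedulable vCPUs, roughly 196 GB of RAM, and 400 GB of disk, while max_unit limits any one instance to 16 vCPUs, 65530 MB, and 174 GB.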
[ 924.957885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "refresh_cache-32e05800-e812-412a-b049-89178737cffd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 924.958271] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance network_info: |[{"id": "e82a5466-7521-4b0f-83e9-3856ecb008f3", "address": "fa:16:3e:20:94:60", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape82a5466-75", "ovs_interfaceid": "e82a5466-7521-4b0f-83e9-3856ecb008f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 924.958783] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:94:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e82a5466-7521-4b0f-83e9-3856ecb008f3', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 924.966842] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 924.967328] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e05800-e812-412a-b049-89178737cffd] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 924.967573] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-55c07b7e-90d5-4848-92fb-c21650ac9350 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.989972] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 924.989972] env[68233]: value = "task-2782619" [ 924.989972] env[68233]: _type = "Task" [ 924.989972] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.997977] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782619, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.190880] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52600b5a-adc2-d52d-e43e-de319799ae81, 'name': SearchDatastore_Task, 'duration_secs': 0.039175} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.191412] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.191659] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 827711ac-ef52-41a0-9029-0a1805522a08/827711ac-ef52-41a0-9029-0a1805522a08.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 925.192091] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-213f28cf-bcb4-4451-a80c-cd6ec8db291e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.202226] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 925.202226] env[68233]: value = "task-2782620" [ 925.202226] env[68233]: _type = "Task" [ 925.202226] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.210796] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.242484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.242933] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.244534] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.084s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.247659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.532s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.248315] env[68233]: DEBUG nova.objects.instance [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lazy-loading 'resources' on Instance uuid 0f813d55-2737-44ae-b62d-3321e77dfdab {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 925.281599] env[68233]: INFO nova.scheduler.client.report [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted allocations for instance abdf9de2-8563-4a31-91a3-0c18b0387533 [ 925.386914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.502214] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782619, 'name': CreateVM_Task, 'duration_secs': 0.489159} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.502472] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 32e05800-e812-412a-b049-89178737cffd] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 925.503155] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.508726] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.508726] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 925.508726] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-139241cc-d8a0-4970-a07a-4e29dd438e4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.509625] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 925.509625] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3e0d3-c89b-aadb-12a0-6b351edabc25" [ 925.509625] env[68233]: _type = "Task" [ 925.509625] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.532746] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3e0d3-c89b-aadb-12a0-6b351edabc25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.663303] env[68233]: DEBUG nova.network.neutron [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updated VIF entry in instance network info cache for port e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.663746] env[68233]: DEBUG nova.network.neutron [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.710714] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782620, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.750641] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 925.797036] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f8f0611-43e8-4014-b8e6-c2a1d678ad58 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "abdf9de2-8563-4a31-91a3-0c18b0387533" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.599s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.023087] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.023469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.023703] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.023892] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.024078] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.025717] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3e0d3-c89b-aadb-12a0-6b351edabc25, 'name': SearchDatastore_Task, 'duration_secs': 0.054326} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.028273] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.028510] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.028737] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.028881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.029473] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.030247] env[68233]: INFO nova.compute.manager [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Terminating instance [ 926.031602] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6bb717c-ac55-4409-8622-4cfb9142fe24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.045376] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.045559] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.046352] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f76d750-493b-4dc1-b724-8c59f9212b48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.052817] env[68233]: DEBUG nova.network.neutron [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.054285] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 926.054285] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52adc192-ad1a-ce16-5978-5a237193d3d9" [ 926.054285] env[68233]: _type = "Task" [ 926.054285] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.071430] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52adc192-ad1a-ce16-5978-5a237193d3d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.166272] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc1741e-f79c-401c-a0d7-7b40fb5edf8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.169519] env[68233]: DEBUG oslo_concurrency.lockutils [req-a8cb53d1-8640-4115-ab7d-53d22209daf4 req-67c49510-6601-423e-8328-cb499df6050f service nova] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.174379] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c277bc-22cb-41de-9844-8a9474b360db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.208087] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8085b978-f369-4dee-9a78-0d557b86060e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.217854] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590021} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.219045] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86fbb041-e3c3-42db-b59f-580b9824b6c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.222675] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 827711ac-ef52-41a0-9029-0a1805522a08/827711ac-ef52-41a0-9029-0a1805522a08.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.222893] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.223144] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef811356-dbee-45d1-babf-17dd1b4e440d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.234674] env[68233]: DEBUG nova.compute.provider_tree [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.237497] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 926.237497] env[68233]: value = "task-2782622" [ 926.237497] env[68233]: _type = "Task" [ 926.237497] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.245506] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.268706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.536788] env[68233]: DEBUG nova.compute.manager [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 926.537054] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 926.537951] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904f13a7-f9dc-49b2-bb3f-bf39bdd8419a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.545939] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 926.546198] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1641a259-24d3-40a5-96b1-9de8eed8c6b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.552693] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 926.552693] env[68233]: value = "task-2782623" [ 926.552693] env[68233]: _type = "Task" [ 926.552693] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.563032] env[68233]: INFO nova.compute.manager [-] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Took 1.83 seconds to deallocate network for instance. [ 926.563673] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.577021] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52adc192-ad1a-ce16-5978-5a237193d3d9, 'name': SearchDatastore_Task, 'duration_secs': 0.026236} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.579705] env[68233]: DEBUG nova.compute.manager [req-4c9d7292-45df-43de-a24f-eeb21e9efd2f req-58f647d8-4847-413b-9c60-5db335eaa5d0 service nova] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Received event network-vif-deleted-2adac63b-c8ff-490f-bcf1-316fb58c480a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 926.579967] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27bf330a-4aaf-404a-96e2-f1fb2046fd35 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.585462] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 926.585462] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529369a7-e8df-0550-9fd4-788ef363f508" [ 926.585462] env[68233]: _type = "Task" [ 926.585462] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.593053] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529369a7-e8df-0550-9fd4-788ef363f508, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.739873] env[68233]: DEBUG nova.scheduler.client.report [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 926.751660] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.063450] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782623, 'name': PowerOffVM_Task, 'duration_secs': 0.489892} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.063773] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 927.063938] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 927.064202] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de1eece-72a8-45eb-86cf-2e05e62dc196 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.072969] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.095639] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529369a7-e8df-0550-9fd4-788ef363f508, 'name': SearchDatastore_Task, 'duration_secs': 0.020919} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.095886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.096184] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 32e05800-e812-412a-b049-89178737cffd/32e05800-e812-412a-b049-89178737cffd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 927.096440] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cf84cc0-69ec-4c4e-b2d7-0eafd7764a04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.103687] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 927.103687] env[68233]: value = "task-2782625" [ 927.103687] env[68233]: _type = "Task" [ 927.103687] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.111438] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.158176] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 927.158401] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 927.158586] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore2] 6ceb7d2d-143a-464a-aca5-6b6838630bb8 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 927.158850] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1cdcdc6e-b852-4eb9-89e3-eafeba0fc4c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.164823] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 927.164823] env[68233]: value = "task-2782626" [ 927.164823] env[68233]: _type = "Task" [ 927.164823] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.172633] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782626, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.248582] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.251118] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.153s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.251396] env[68233]: DEBUG nova.objects.instance [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lazy-loading 'resources' on Instance uuid 903f0919-b321-4d74-9ea2-bc9771184ded {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.258274] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.980227} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.259430] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.259891] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb830dea-8ff3-4a93-aabd-e93810917188 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.282848] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 827711ac-ef52-41a0-9029-0a1805522a08/827711ac-ef52-41a0-9029-0a1805522a08.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.284280] env[68233]: INFO nova.scheduler.client.report [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted allocations for instance 0f813d55-2737-44ae-b62d-3321e77dfdab [ 927.285943] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-567a7c53-ab5e-4071-b803-634622329fe9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.308339] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de 
tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 927.308339] env[68233]: value = "task-2782627" [ 927.308339] env[68233]: _type = "Task" [ 927.308339] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.320366] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782627, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.614220] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477223} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.614580] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 32e05800-e812-412a-b049-89178737cffd/32e05800-e812-412a-b049-89178737cffd.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.614767] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.615029] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35ec66ed-1b75-4269-92f4-f8bf7b0024fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.621623] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 927.621623] env[68233]: value = "task-2782629" [ 927.621623] env[68233]: _type = "Task" [ 927.621623] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.630389] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.675392] env[68233]: DEBUG oslo_vmware.api [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782626, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323827} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.675772] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 927.676195] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 927.676273] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 927.676459] env[68233]: INFO nova.compute.manager [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 927.676762] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.677021] env[68233]: DEBUG nova.compute.manager [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 927.677156] env[68233]: DEBUG nova.network.neutron [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 927.807447] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0e206a85-40f5-4178-86dd-512f528a996b tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "0f813d55-2737-44ae-b62d-3321e77dfdab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.538s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.822256] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782627, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.133302] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072149} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.133642] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.134368] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352f5f29-3073-4214-acef-2a422faa66e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.159836] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 32e05800-e812-412a-b049-89178737cffd/32e05800-e812-412a-b049-89178737cffd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.162517] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e1ad647-e403-4d94-adf3-26602ebae10b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.182220] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 928.182220] env[68233]: value = "task-2782630" [ 928.182220] env[68233]: _type = "Task" [ 928.182220] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.191361] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782630, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.213488] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed56eb72-f50d-47df-866e-298dfa1da785 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.220852] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-220a1899-febc-4d44-91fb-146e578bd767 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.253276] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c23f8f-564b-437b-b6a2-9532c48d1439 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.261164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246e2ad6-6b42-4802-9bcf-0a78eeed820a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.274904] env[68233]: DEBUG nova.compute.provider_tree [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 928.320280] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782627, 'name': ReconfigVM_Task, 'duration_secs': 0.611959} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.320558] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 827711ac-ef52-41a0-9029-0a1805522a08/827711ac-ef52-41a0-9029-0a1805522a08.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.321194] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d2aa7ee-8b25-452a-8674-822b61e54b7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.326868] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 928.326868] env[68233]: value = "task-2782631" [ 928.326868] env[68233]: _type = "Task" [ 928.326868] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.334819] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782631, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.466068] env[68233]: DEBUG nova.network.neutron [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.674603] env[68233]: DEBUG nova.compute.manager [req-b215e374-3f22-4009-a302-f4c732b9c8bb req-e2238ba2-4e6c-4b96-88af-035eddab806b service nova] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Received event network-vif-deleted-4a662388-bd69-4cf2-bc5f-b97de14b1ee3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 928.692032] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782630, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.777789] env[68233]: DEBUG nova.scheduler.client.report [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 928.836756] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782631, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.968122] env[68233]: INFO nova.compute.manager [-] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Took 1.29 seconds to deallocate network for instance. [ 929.194535] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782630, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.283121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.285783] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.317s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 929.285783] env[68233]: DEBUG nova.objects.instance [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lazy-loading 'resources' on Instance uuid 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 929.308835] env[68233]: INFO nova.scheduler.client.report [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Deleted allocations for instance 903f0919-b321-4d74-9ea2-bc9771184ded [ 929.340554] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782631, 'name': Rename_Task, 'duration_secs': 0.532935} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.342208] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.342521] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fbb0db4-1aa5-4279-9d03-5fd1f482706f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.348852] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 929.348852] env[68233]: value = "task-2782632" [ 929.348852] env[68233]: _type = "Task" [ 929.348852] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.356942] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782632, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.474963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.694166] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782630, 'name': ReconfigVM_Task, 'duration_secs': 1.12297} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.694471] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 32e05800-e812-412a-b049-89178737cffd/32e05800-e812-412a-b049-89178737cffd.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.695128] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6850bcd5-6d84-4cf8-92e7-fbe4a6b717d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.702288] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 929.702288] env[68233]: value = "task-2782634" [ 929.702288] env[68233]: _type = "Task" [ 929.702288] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.711743] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782634, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.818263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d82f6e7-ffbe-4ca2-b3c3-4a3bc717e3fe tempest-MultipleCreateTestJSON-1316369434 tempest-MultipleCreateTestJSON-1316369434-project-member] Lock "903f0919-b321-4d74-9ea2-bc9771184ded" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.340s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 929.860908] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782632, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.138798] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6bfc2fa-e7ee-4781-a44d-a070073f33d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.147737] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43869f78-759e-4355-8bb7-1f3bf4aac543 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.179981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff79466b-9805-4887-9ecb-c11c44655052 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.187955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f387bc2-31f3-4e72-9f04-ea324d7772d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.210644] env[68233]: DEBUG nova.compute.provider_tree [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.226130] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782634, 'name': Rename_Task, 'duration_secs': 0.304389} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.226554] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.226917] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e0ff2b4-270c-4715-be78-5218c40d070a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.235875] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 930.235875] env[68233]: value = "task-2782635" [ 930.235875] env[68233]: _type = "Task" [ 930.235875] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.247866] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782635, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.365051] env[68233]: DEBUG oslo_vmware.api [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782632, 'name': PowerOnVM_Task, 'duration_secs': 0.857482} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.365569] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 930.366467] env[68233]: INFO nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Took 12.18 seconds to spawn the instance on the hypervisor. [ 930.366467] env[68233]: DEBUG nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.367490] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef062b20-8563-47f0-9668-a8df67769578 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.715135] env[68233]: DEBUG nova.scheduler.client.report [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 930.746677] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782635, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.889019] env[68233]: INFO nova.compute.manager [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Took 31.02 seconds to build instance. 
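[editor's note] The spawn of instance 827711ac above is driven entirely by oslo_vmware's wait_for_task/_poll_task loop: each vCenter call (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task reference immediately, which is then polled until it reports success, with the progress percentage logged on every poll. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info(task_id) helper (in oslo.vmware this information comes from the vSphere TaskInfo object; this is not the library's actual implementation):

    import time

    # Hypothetical helper, assumed to return (state, progress, error) for a task.
    def get_task_info(task_id):
        raise NotImplementedError("replace with a real vSphere TaskInfo lookup")

    def wait_for_task(task_id, poll_interval=0.5, timeout=300.0):
        """Poll a vCenter-style task until it succeeds, fails, or times out."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress, error = get_task_info(task_id)
            print("Task %s progress is %s%%" % (task_id, progress))
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("Task %s failed: %s" % (task_id, error))
            time.sleep(poll_interval)  # still queued/running: poll again
        raise TimeoutError("Task %s did not complete within %ss" % (task_id, timeout))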
[ 931.219463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.222012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.581s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.222246] env[68233]: DEBUG nova.objects.instance [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lazy-loading 'resources' on Instance uuid 3cca16e1-3363-4026-9359-4ed2ba41e25d {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.248427] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782635, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.249400] env[68233]: INFO nova.scheduler.client.report [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Deleted allocations for instance 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f [ 931.393663] env[68233]: DEBUG oslo_concurrency.lockutils [None req-133217a2-a62e-41dc-9f34-325d6bf1c5de tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.533s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.502470] env[68233]: DEBUG nova.compute.manager [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Received event network-changed-b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 931.502712] env[68233]: DEBUG nova.compute.manager [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Refreshing instance network info cache due to event network-changed-b710ae65-1e11-4b1c-8389-3094fbf99637. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 931.502829] env[68233]: DEBUG oslo_concurrency.lockutils [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] Acquiring lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.503038] env[68233]: DEBUG oslo_concurrency.lockutils [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] Acquired lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.505031] env[68233]: DEBUG nova.network.neutron [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Refreshing network info cache for port b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.748536] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782635, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.761555] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4001ac4a-019c-4eb7-8e6c-ba5317775d2d tempest-MigrationsAdminTest-1088288043 tempest-MigrationsAdminTest-1088288043-project-member] Lock "2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.636s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.191654] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc3a190-2c5f-410e-b269-b00d3037f930 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.206115] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bdc886-9af3-427e-8ca5-e733b998a404 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.248281] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94180773-f895-47ff-9af7-5258ae5fc24e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.257321] env[68233]: DEBUG oslo_vmware.api [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782635, 'name': PowerOnVM_Task, 'duration_secs': 2.014523} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.259576] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.259876] env[68233]: INFO nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Took 11.42 seconds to spawn the instance on the hypervisor. [ 932.260134] env[68233]: DEBUG nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 932.261088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a21a94-311c-495a-bb05-f95173651d2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.265145] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d249f90b-f216-4bae-9ae9-c8ac234e60f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.284707] env[68233]: DEBUG nova.compute.provider_tree [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.395024] env[68233]: DEBUG nova.network.neutron [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updated VIF entry in instance network info cache for port b710ae65-1e11-4b1c-8389-3094fbf99637. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.395343] env[68233]: DEBUG nova.network.neutron [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updating instance_info_cache with network_info: [{"id": "b710ae65-1e11-4b1c-8389-3094fbf99637", "address": "fa:16:3e:3c:17:d7", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.242", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb710ae65-1e", "ovs_interfaceid": "b710ae65-1e11-4b1c-8389-3094fbf99637", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.797856] env[68233]: DEBUG nova.scheduler.client.report [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.803687] env[68233]: INFO nova.compute.manager [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Took 31.41 seconds to build instance. 
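[editor's note] The recurring "Inventory has not changed for provider 51aa13e7-..." records carry the resource provider's full inventory (VCPU, MEMORY_MB, DISK_GB, each with total, reserved, min_unit, max_unit, step_size, allocation_ratio). In the Placement model the schedulable capacity of each resource class is derived from those fields as (total - reserved) * allocation_ratio; a small sketch using the values logged above (illustration only, not Nova's scheduler/report-client code):

    # Inventory as reported in the log for provider 51aa13e7-0977-4031-b209-4ae90c83752c.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Effective capacity per resource class: (total - reserved) * allocation_ratio."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}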
[ 932.898415] env[68233]: DEBUG oslo_concurrency.lockutils [req-81d4bb68-666c-4f04-837e-6f94203c589b req-517bd4e3-80f7-484f-bad6-c2edcb11fa05 service nova] Releasing lock "refresh_cache-827711ac-ef52-41a0-9029-0a1805522a08" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.188076] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "32e05800-e812-412a-b049-89178737cffd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.305217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.081s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.306492] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.187s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.307127] env[68233]: DEBUG nova.objects.instance [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lazy-loading 'resources' on Instance uuid 8880bb83-56f1-4ad2-9d6d-1885826aed21 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.308474] env[68233]: DEBUG oslo_concurrency.lockutils [None req-068cf117-a48f-4d23-a073-d9a4a0dce686 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.926s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.309012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.121s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.315374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "32e05800-e812-412a-b049-89178737cffd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.315374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 
tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.315374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.317150] env[68233]: INFO nova.compute.manager [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Terminating instance [ 933.341264] env[68233]: INFO nova.scheduler.client.report [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleted allocations for instance 3cca16e1-3363-4026-9359-4ed2ba41e25d [ 933.820390] env[68233]: DEBUG nova.compute.manager [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 933.821292] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 933.822273] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82c7e0b1-af3c-47af-8892-159f18a37889 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.833290] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 933.833649] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5ab4cbd-9502-4775-8881-4446f2876fee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.842585] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 933.842585] env[68233]: value = "task-2782637" [ 933.842585] env[68233]: _type = "Task" [ 933.842585] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.853629] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeb00224-8b8f-4654-b547-36df1ef91973 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "3cca16e1-3363-4026-9359-4ed2ba41e25d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.899s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.859813] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782637, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.236928] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50c480c-4d53-4d91-8c49-117c95182109 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.246236] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bb8069-1c62-4e64-b555-dc54e6a6f5ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.281839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b503fd75-0ff5-4ff0-8057-b33bcbb50025 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.290668] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e830668f-52b7-4445-87ed-a1f360d0c956 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.306758] env[68233]: DEBUG nova.compute.provider_tree [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.353844] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782637, 'name': PowerOffVM_Task, 'duration_secs': 0.35591} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.354146] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.354323] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.354776] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17892ec9-5d33-47f7-a7bc-05f6c9283f8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.454028] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.454028] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.454028] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleting the datastore file [datastore2] 32e05800-e812-412a-b049-89178737cffd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.454028] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-772a4f9f-0b42-4a48-9467-8e38a2c23a1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.459462] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 934.459462] env[68233]: value = "task-2782639" [ 934.459462] env[68233]: _type = "Task" [ 934.459462] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.468486] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782639, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.597892] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.598187] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.598404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.598590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.598758] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.601101] env[68233]: INFO nova.compute.manager [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Terminating instance [ 934.810260] env[68233]: DEBUG nova.scheduler.client.report [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 934.971231] env[68233]: DEBUG oslo_vmware.api [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 
tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292023} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.971647] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.971914] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 934.971991] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 934.972187] env[68233]: INFO nova.compute.manager [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: 32e05800-e812-412a-b049-89178737cffd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 934.972498] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 934.972924] env[68233]: DEBUG nova.compute.manager [-] [instance: 32e05800-e812-412a-b049-89178737cffd] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 934.972924] env[68233]: DEBUG nova.network.neutron [-] [instance: 32e05800-e812-412a-b049-89178737cffd] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.106795] env[68233]: DEBUG nova.compute.manager [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.107351] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.110029] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3843fcc-07c4-4557-9ff9-62ebd431a1ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.124594] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.124594] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d84b9d0-7860-439f-9af6-e42d85e25242 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.133347] env[68233]: DEBUG oslo_vmware.api [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 935.133347] env[68233]: value = "task-2782640" [ 935.133347] env[68233]: _type = "Task" [ 935.133347] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.142298] env[68233]: DEBUG oslo_vmware.api [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782640, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.315410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.009s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.319313] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.973s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.319811] env[68233]: INFO nova.compute.claims [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.345376] env[68233]: INFO nova.scheduler.client.report [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleted allocations for instance 8880bb83-56f1-4ad2-9d6d-1885826aed21 [ 935.649137] env[68233]: DEBUG oslo_vmware.api [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782640, 'name': PowerOffVM_Task, 'duration_secs': 0.249852} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.649418] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 935.649589] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 935.649851] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a63c0d4-f9e7-4bd6-aa16-6974fc31bef9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.738130] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 935.738576] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 935.738820] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleting the datastore file [datastore2] 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 935.739423] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb173a7c-8f92-4840-893a-f6d600de1783 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.745818] env[68233]: DEBUG nova.compute.manager [req-851e6da2-4477-444b-958e-caf9282acb78 req-3200dfeb-121f-4262-94e4-84439d79d371 service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Received event network-vif-deleted-e82a5466-7521-4b0f-83e9-3856ecb008f3 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 935.746029] env[68233]: INFO nova.compute.manager [req-851e6da2-4477-444b-958e-caf9282acb78 req-3200dfeb-121f-4262-94e4-84439d79d371 service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Neutron deleted interface e82a5466-7521-4b0f-83e9-3856ecb008f3; detaching it from the instance and deleting it from the info cache [ 935.746199] env[68233]: DEBUG nova.network.neutron [req-851e6da2-4477-444b-958e-caf9282acb78 req-3200dfeb-121f-4262-94e4-84439d79d371 service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.750308] env[68233]: DEBUG oslo_vmware.api [None 
req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for the task: (returnval){ [ 935.750308] env[68233]: value = "task-2782643" [ 935.750308] env[68233]: _type = "Task" [ 935.750308] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.764818] env[68233]: DEBUG oslo_vmware.api [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782643, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.855817] env[68233]: DEBUG oslo_concurrency.lockutils [None req-05f137de-c318-4de7-921e-345dffcc1b98 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "8880bb83-56f1-4ad2-9d6d-1885826aed21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.736s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.224058] env[68233]: DEBUG nova.network.neutron [-] [instance: 32e05800-e812-412a-b049-89178737cffd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.249615] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fca71afa-70f1-4700-abb0-dc298dd81003 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.262946] env[68233]: DEBUG oslo_vmware.api [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Task: {'id': task-2782643, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.209198} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.263814] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 936.264023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 936.264194] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 936.264366] env[68233]: INFO nova.compute.manager [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Took 1.16 seconds to destroy the instance on the hypervisor. 
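The destroy sequence above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is driven by the same poll-until-done loop each time: the vCenter call returns a task handle, and the caller waits on it, logging progress until it completes. Below is a minimal, self-contained Python sketch of that polling pattern; FakeTask, wait_for_task and the simulated progress values are illustrative stand-ins, not oslo.vmware's actual API.

```python
# Illustrative sketch of the task-polling pattern seen in the log; not oslo.vmware code.
import itertools
import time


class FakeTask:
    """Stand-in for a vCenter task handle such as 'task-2782640'."""

    def __init__(self, task_id: str, name: str):
        self.task_id = task_id
        self.name = name
        # Simulated progress values the poller will observe.
        self._progress = itertools.chain([0, 40, 80], itertools.repeat(100))

    def poll(self):
        pct = next(self._progress)
        return ("success" if pct >= 100 else "running"), pct


def wait_for_task(task, poll_interval=0.5, timeout=30.0):
    """Poll until the task succeeds, mirroring the 'progress is N%' /
    'completed successfully' messages in the log above."""
    deadline = time.monotonic() + timeout
    while True:
        state, pct = task.poll()
        print(f"Task: {{'id': '{task.task_id}', 'name': '{task.name}'}} progress is {pct}%.")
        if state == "success":
            print(f"Task {task.task_id} completed successfully.")
            return
        if time.monotonic() > deadline:
            raise TimeoutError(f"{task.task_id} did not finish within {timeout}s")
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-2782640", "PowerOffVM_Task"), poll_interval=0.05)
```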
[ 936.264603] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 936.264857] env[68233]: DEBUG nova.compute.manager [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 936.264953] env[68233]: DEBUG nova.network.neutron [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 936.269499] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25364758-c1bc-47ed-889e-ee5098b5ffff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.309535] env[68233]: DEBUG nova.compute.manager [req-851e6da2-4477-444b-958e-caf9282acb78 req-3200dfeb-121f-4262-94e4-84439d79d371 service nova] [instance: 32e05800-e812-412a-b049-89178737cffd] Detach interface failed, port_id=e82a5466-7521-4b0f-83e9-3856ecb008f3, reason: Instance 32e05800-e812-412a-b049-89178737cffd could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 936.704229] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0fa7f03-8e3b-40c5-9f45-0f654e6cfb1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.712255] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482663ed-0765-45ae-b3d8-2b6caf2938a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.751977] env[68233]: INFO nova.compute.manager [-] [instance: 32e05800-e812-412a-b049-89178737cffd] Took 1.78 seconds to deallocate network for instance. 
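The lockutils lines above all follow one shape: a request announces that it is acquiring a named lock, reports how long it waited once it holds it, and reports how long it held it on release (for example the "compute_resources" lock held for 2.009s). A rough standard-library sketch of that pattern is shown below; the lock name and the update_usage worker are hypothetical, and real Nova uses oslo.concurrency rather than a plain threading.Lock.

```python
# Minimal sketch of the named-lock wait/held logging pattern; not oslo.concurrency code.
import contextlib
import threading
import time

_locks = {}
_locks_guard = threading.Lock()


def _get_lock(name: str) -> threading.Lock:
    # One shared lock object per name, created lazily.
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())


@contextlib.contextmanager
def timed_lock(name: str, owner: str):
    lock = _get_lock(name)
    print(f'Acquiring lock "{name}" by "{owner}"')
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - t1:.3f}s')


def update_usage(instance_uuid: str):
    # Hypothetical critical section, analogous to ResourceTracker.update_usage.
    with timed_lock("compute_resources", f"update_usage({instance_uuid})"):
        time.sleep(0.01)  # pretend to update the resource tracker


if __name__ == "__main__":
    workers = [threading.Thread(target=update_usage, args=(f"instance-{i}",)) for i in range(3)]
    for w in workers:
        w.start()
    for w in workers:
        w.join()
```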
[ 936.755043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324784d4-241c-4d70-b866-0b88a3eee023 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.767606] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac9ddf4-dfd8-4dc3-8158-e4b5fa744b24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.783372] env[68233]: DEBUG nova.compute.provider_tree [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.058793] env[68233]: DEBUG nova.network.neutron [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.080455] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.080703] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.150606] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.151031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.151314] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.151654] env[68233]: DEBUG 
oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.151904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.154762] env[68233]: INFO nova.compute.manager [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Terminating instance [ 937.266257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.286508] env[68233]: DEBUG nova.scheduler.client.report [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 937.562853] env[68233]: INFO nova.compute.manager [-] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Took 1.30 seconds to deallocate network for instance. [ 937.583473] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 937.660614] env[68233]: DEBUG nova.compute.manager [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 937.660977] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.662398] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32296a6d-75f6-4c22-bd58-1032fc109906 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.673624] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.674072] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e54a4400-0076-4bb8-a3c2-9c2d5cf9a9ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.683053] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 937.683053] env[68233]: value = "task-2782645" [ 937.683053] env[68233]: _type = "Task" [ 937.683053] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.697160] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782645, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.792074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.792793] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 937.796372] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.409s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.796372] env[68233]: DEBUG nova.objects.instance [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lazy-loading 'resources' on Instance uuid 876d428d-d5c9-422a-aba2-2d6c61b092db {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 937.826166] env[68233]: DEBUG nova.compute.manager [req-b8f70148-6d91-41d7-9a7f-85e8731dafe1 req-2e4b5561-a983-41b3-a963-5a76a71ff349 service nova] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Received event network-vif-deleted-1ce20932-0faf-4cba-a1ab-409619e3147b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 938.070763] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.108170] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.194319] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782645, 'name': PowerOffVM_Task, 'duration_secs': 0.242224} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.194602] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.194807] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.195075] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8742316-0fb9-4305-be92-0a15b13f4b46 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.268883] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.269186] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.269316] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleting the datastore file [datastore2] 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.269580] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c779393-a864-40ca-aeed-6b8af786f163 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.277092] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for the task: (returnval){ [ 938.277092] env[68233]: value = "task-2782647" [ 938.277092] env[68233]: _type = "Task" [ 938.277092] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.287469] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782647, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.299011] env[68233]: DEBUG nova.compute.utils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 938.300610] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 938.300793] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 938.370348] env[68233]: DEBUG nova.policy [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 938.635494] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8908039b-8e3a-4451-8c9b-65be82de2b8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.639093] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Successfully created port: 4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.648266] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805beb6c-e0c0-48ac-872b-711554c17177 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.695272] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19f1350-348f-4f4d-8ad3-cc413d122f98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.704623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668a7265-3020-49ca-8bee-0a5ed97e481e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.723025] env[68233]: DEBUG nova.compute.provider_tree [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Inventory has not changed in ProviderTree for provider: 
51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.787647] env[68233]: DEBUG oslo_vmware.api [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Task: {'id': task-2782647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167092} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.788663] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.788663] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.788663] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.788663] env[68233]: INFO nova.compute.manager [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 1.13 seconds to destroy the instance on the hypervisor. [ 938.788974] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.788974] env[68233]: DEBUG nova.compute.manager [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.788974] env[68233]: DEBUG nova.network.neutron [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.808794] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 939.225375] env[68233]: DEBUG nova.scheduler.client.report [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 939.694720] env[68233]: DEBUG nova.network.neutron [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.730224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.733058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.464s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.736225] env[68233]: INFO nova.compute.claims [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 939.755520] env[68233]: INFO nova.scheduler.client.report [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Deleted allocations for instance 876d428d-d5c9-422a-aba2-2d6c61b092db [ 939.818813] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 939.827917] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 939.828140] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 939.846939] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 939.847260] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 939.847451] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 939.847899] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 939.847899] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 939.848977] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 939.848977] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 939.848977] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 939.848977] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 939.848977] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 939.849164] env[68233]: DEBUG nova.virt.hardware [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 939.850308] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8225b86-2e8b-4972-b174-b6c9d0ed75e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.855681] env[68233]: DEBUG nova.compute.manager [req-5beb468a-15d8-4045-bba2-102600b2eacc req-d6f85dab-c8ee-479a-a7a7-8206d1909d64 service nova] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Received event network-vif-deleted-1592075d-4a4d-4e79-9eaa-57d357a45e17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 939.863269] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de17a159-6879-44f9-9710-9b386e21777f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.199478] env[68233]: INFO nova.compute.manager [-] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Took 1.41 seconds to deallocate network for instance. 
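The nova.virt.hardware lines above enumerate CPU topologies for the m1.nano flavor: with 1 vCPU and limits of 65536 sockets, cores and threads, only VirtCPUTopology(cores=1,sockets=1,threads=1) is possible. The sketch below reproduces that enumeration in simplified form; it is not the actual nova.virt.hardware code and it omits the preference sorting the driver performs afterwards.

```python
# Simplified illustration of the "Build topologies for N vcpu(s)" step; not Nova's implementation.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every (sockets, cores, threads) whose product equals vcpus within the limits."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found


if __name__ == "__main__":
    topos = possible_topologies(1)
    print(f"Got {len(topos)} possible topologies")  # -> Got 1 possible topologies
    print(topos)  # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]
```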
[ 940.265571] env[68233]: DEBUG oslo_concurrency.lockutils [None req-424fa367-b050-4335-8380-3f124bd81cab tempest-ServersTestJSON-78925205 tempest-ServersTestJSON-78925205-project-member] Lock "876d428d-d5c9-422a-aba2-2d6c61b092db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.699s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.336501] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.336708] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.336861] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.337025] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.337186] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.337334] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.337475] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 940.337621] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 940.449484] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Successfully updated port: 4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.707435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.843948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.954846] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.954998] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.955152] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.153730] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e731cec6-be84-410b-a788-9ee27d7862b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.163353] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b96cc4-c158-45bd-9fc7-f4ddcced02f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.198067] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a944807-796a-42ec-95a1-743a8aee553f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.207573] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3fb80b79-71b0-430a-a833-ad0a87fdd153 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.223815] env[68233]: DEBUG nova.compute.provider_tree [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.316293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.316539] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.514687] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.727355] env[68233]: DEBUG nova.scheduler.client.report [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 941.820134] env[68233]: DEBUG nova.compute.utils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 941.844291] env[68233]: DEBUG nova.network.neutron [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Updating instance_info_cache with network_info: [{"id": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "address": "fa:16:3e:55:08:cc", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5fdae0-28", "ovs_interfaceid": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.234276] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.499s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.234276] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 942.235846] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.163s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.236075] env[68233]: DEBUG nova.objects.instance [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'resources' on Instance uuid 1207585c-fb2a-43b7-aec2-c3a7889255a5 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 942.243308] env[68233]: DEBUG nova.compute.manager [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Received event network-vif-plugged-4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 942.243555] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Acquiring lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.244330] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.244504] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.244736] env[68233]: DEBUG nova.compute.manager [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] No waiting events found dispatching network-vif-plugged-4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 942.244941] env[68233]: WARNING nova.compute.manager [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Received unexpected event network-vif-plugged-4b5fdae0-2826-4f7e-89fe-1d02f303de0e for instance with vm_state building and task_state spawning. [ 942.245102] env[68233]: DEBUG nova.compute.manager [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Received event network-changed-4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 942.245370] env[68233]: DEBUG nova.compute.manager [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Refreshing instance network info cache due to event network-changed-4b5fdae0-2826-4f7e-89fe-1d02f303de0e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 942.245549] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Acquiring lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.323050] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.347204] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.347540] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Instance network_info: |[{"id": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "address": "fa:16:3e:55:08:cc", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5fdae0-28", "ovs_interfaceid": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 942.347850] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Acquired lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.348039] env[68233]: DEBUG nova.network.neutron [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Refreshing network info cache for port 4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.349295] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:08:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b5fdae0-2826-4f7e-89fe-1d02f303de0e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.357176] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.360180] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.360687] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b5d4739-5cd5-4ebf-b337-477625499f36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.384941] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.384941] env[68233]: value = "task-2782649" [ 942.384941] env[68233]: _type = "Task" [ 942.384941] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.396279] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782649, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.562035] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.562332] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.742913] env[68233]: DEBUG nova.compute.utils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 942.747495] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 942.747768] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 942.826420] env[68233]: DEBUG nova.policy [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 942.831862] env[68233]: DEBUG nova.network.neutron [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Updated VIF entry in instance network info cache for port 4b5fdae0-2826-4f7e-89fe-1d02f303de0e. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 942.832226] env[68233]: DEBUG nova.network.neutron [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Updating instance_info_cache with network_info: [{"id": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "address": "fa:16:3e:55:08:cc", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b5fdae0-28", "ovs_interfaceid": "4b5fdae0-2826-4f7e-89fe-1d02f303de0e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.897857] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782649, 'name': CreateVM_Task, 'duration_secs': 0.417042} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.899364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 942.903522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.903522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.903522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 942.903822] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f72a161-c1d1-4ae6-902f-97d7ea369cf5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.910962] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 942.910962] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529528ad-b9b8-0159-ce5b-13394e9a71ec" [ 942.910962] env[68233]: _type = "Task" [ 942.910962] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.923800] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529528ad-b9b8-0159-ce5b-13394e9a71ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009922} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.927844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.927844] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.927844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.927844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.927844] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.928459] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f953ae89-b891-4d60-a3d6-fae584ad27c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.938845] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.938968] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.940737] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b883efbe-09cf-4de8-8a5f-7826ea528358 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.955859] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 942.955859] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa6b4f-14f6-fcd6-95c1-c352706fc21d" [ 942.955859] env[68233]: _type = "Task" [ 942.955859] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.969160] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa6b4f-14f6-fcd6-95c1-c352706fc21d, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.969160] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f46168-a00f-42d7-9b0e-70c0a635fba6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.976110] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 942.976110] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5219f0df-91a3-ec9f-4b52-17a3ddf0927f" [ 942.976110] env[68233]: _type = "Task" [ 942.976110] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.984699] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5219f0df-91a3-ec9f-4b52-17a3ddf0927f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.064560] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 943.204077] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f93ad50-f9b8-4cd1-993d-e70356bac843 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.213338] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbb1a41-9246-4996-b07e-9f3f75b757b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.252820] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 943.256424] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e977c8-cdde-4ef9-b8a2-7f2efbb7059b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.265095] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb3d3d5-a098-4e0a-b7f8-441a3ebe8316 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.281816] env[68233]: DEBUG nova.compute.provider_tree [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.330849] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Successfully created port: aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 943.337045] env[68233]: DEBUG oslo_concurrency.lockutils [req-18cebf5d-f71c-4674-a6b1-e6bf76baaf76 req-c0379892-92ab-408f-b727-3b3703173165 service nova] Releasing lock "refresh_cache-0b1065c2-7923-4dc4-a64f-be72a7994472" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.405027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.405479] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
943.405970] env[68233]: INFO nova.compute.manager [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Attaching volume 966710f3-d7e3-4a95-bd4c-e592a39ff63d to /dev/sdb [ 943.447698] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a0f1da-7f9b-4600-bc3b-d167acc22143 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.456443] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-964391d9-02af-4d66-b8f4-873813ecd0e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.471137] env[68233]: DEBUG nova.virt.block_device [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating existing volume attachment record: 77086583-3212-4ee3-9a8c-034c9ba5531b {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 943.485661] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5219f0df-91a3-ec9f-4b52-17a3ddf0927f, 'name': SearchDatastore_Task, 'duration_secs': 0.010416} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.485921] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.486550] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0b1065c2-7923-4dc4-a64f-be72a7994472/0b1065c2-7923-4dc4-a64f-be72a7994472.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.486550] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-069cdd78-d54f-4dfc-a7af-02d6fc304519 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.510655] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 943.510655] env[68233]: value = "task-2782650" [ 943.510655] env[68233]: _type = "Task" [ 943.510655] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.520108] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782650, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.565883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.565883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.598505] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.785331] env[68233]: DEBUG nova.scheduler.client.report [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 944.022613] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782650, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495593} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.022982] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0b1065c2-7923-4dc4-a64f-be72a7994472/0b1065c2-7923-4dc4-a64f-be72a7994472.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.023108] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.023377] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1970e8c9-e78c-40c9-b0a1-da37234652ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.032066] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 944.032066] env[68233]: value = "task-2782654" [ 944.032066] env[68233]: _type = "Task" [ 944.032066] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.039863] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.067981] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 944.265644] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 944.294155] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.297057] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.822s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.298257] env[68233]: DEBUG nova.objects.instance [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lazy-loading 'resources' on Instance uuid 6ceb7d2d-143a-464a-aca5-6b6838630bb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 944.306965] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.307235] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.307409] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.307594] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.307774] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 
944.307917] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.308312] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.308548] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.308803] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.309079] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.309506] env[68233]: DEBUG nova.virt.hardware [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.310574] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda5fb24-8b9d-4969-a29e-ad06bebeea20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.316856] env[68233]: INFO nova.scheduler.client.report [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted allocations for instance 1207585c-fb2a-43b7-aec2-c3a7889255a5 [ 944.326279] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446a1c63-c5ae-4f45-aa44-ddac701d004a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.542673] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074711} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.542927] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.543710] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd81e47f-ea4e-4a74-b23f-cd66e49cffb8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.566278] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 0b1065c2-7923-4dc4-a64f-be72a7994472/0b1065c2-7923-4dc4-a64f-be72a7994472.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.566588] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-252f914e-fb64-4a1b-8ffa-352500644dcc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.590081] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 944.590081] env[68233]: value = "task-2782655" [ 944.590081] env[68233]: _type = "Task" [ 944.590081] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.599429] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782655, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.600839] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.753166] env[68233]: DEBUG nova.compute.manager [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Received event network-vif-plugged-aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 944.754078] env[68233]: DEBUG oslo_concurrency.lockutils [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.754078] env[68233]: DEBUG oslo_concurrency.lockutils [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.754538] env[68233]: DEBUG oslo_concurrency.lockutils [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.754848] env[68233]: DEBUG nova.compute.manager [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] No waiting events found dispatching network-vif-plugged-aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 944.755635] env[68233]: WARNING nova.compute.manager [req-0e56b0e1-1903-4322-8a47-2b373e7d7178 req-a25721b7-96b5-47a7-ba97-adadf5487c6d service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Received unexpected event network-vif-plugged-aa886750-c433-4287-826b-2f74ab52f0d0 for instance with vm_state building and task_state spawning. 
[ 944.829556] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14da0db9-f752-48d1-ab04-42ac20259b90 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "1207585c-fb2a-43b7-aec2-c3a7889255a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.756s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.847644] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Successfully updated port: aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 945.104367] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782655, 'name': ReconfigVM_Task, 'duration_secs': 0.299327} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.104672] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 0b1065c2-7923-4dc4-a64f-be72a7994472/0b1065c2-7923-4dc4-a64f-be72a7994472.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.105704] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9121d051-0bcb-41cf-9c67-c5266686f959 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.116445] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 945.116445] env[68233]: value = "task-2782656" [ 945.116445] env[68233]: _type = "Task" [ 945.116445] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.131478] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782656, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.179265] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e13990-7f0b-48a2-9cda-988bbbac9793 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.191949] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc882dc-77b5-4f23-ae97-a3a08166389e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.237851] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "c5b42243-878f-4150-a5d3-63d69e474bd1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.238136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.238344] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.238530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.238702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.241315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3aacc5d-0ce4-47f5-9fd5-b4006c2b3251 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.244231] env[68233]: INFO nova.compute.manager [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Terminating instance [ 945.251406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-101ad13e-004c-4755-93cb-d779e1803386 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.268208] env[68233]: DEBUG nova.compute.provider_tree [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.350129] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 945.350284] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.350435] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 945.628803] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782656, 'name': Rename_Task, 'duration_secs': 0.21084} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.629121] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.629332] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c277394-6d41-4db6-96a5-35d8af39b5e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.637282] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 945.637282] env[68233]: value = "task-2782657" [ 945.637282] env[68233]: _type = "Task" [ 945.637282] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.648100] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782657, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.734568] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "edf4bfac-175b-40b7-bf08-298c4735bfae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.734798] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.748442] env[68233]: DEBUG nova.compute.manager [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.748659] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.750037] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359c647c-3d51-4d8b-a92a-c52bfc244617 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.758621] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.758910] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fd2430b-04a8-4e0c-9d3c-e45f1f64a395 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.765861] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 945.765861] env[68233]: value = "task-2782658" [ 945.765861] env[68233]: _type = "Task" [ 945.765861] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.771275] env[68233]: DEBUG nova.scheduler.client.report [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.777442] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782658, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.882018] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 946.036525] env[68233]: DEBUG nova.network.neutron [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Updating instance_info_cache with network_info: [{"id": "aa886750-c433-4287-826b-2f74ab52f0d0", "address": "fa:16:3e:ee:98:21", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa886750-c4", "ovs_interfaceid": "aa886750-c433-4287-826b-2f74ab52f0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.147676] env[68233]: DEBUG oslo_vmware.api [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782657, 'name': PowerOnVM_Task, 'duration_secs': 0.47246} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.148205] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.148467] env[68233]: INFO nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Took 6.33 seconds to spawn the instance on the hypervisor. [ 946.148662] env[68233]: DEBUG nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.149520] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd788ef2-5583-4335-9083-c2bf9ef07eeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.237270] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 946.277777] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782658, 'name': PowerOffVM_Task, 'duration_secs': 0.247284} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.278491] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.280412] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.280603] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.281078] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.015s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.281297] env[68233]: DEBUG nova.objects.instance [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'resources' on Instance uuid 32e05800-e812-412a-b049-89178737cffd {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.282306] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39c6b3e2-aa11-44a0-b372-0d4cbd5c36f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.302792] env[68233]: INFO nova.scheduler.client.report [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted allocations for instance 6ceb7d2d-143a-464a-aca5-6b6838630bb8 [ 946.356220] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.356395] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.356569] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore2] c5b42243-878f-4150-a5d3-63d69e474bd1 {{(pid=68233) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.356824] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de49d8c0-9f4c-43c1-b603-8b824c64bb82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.363557] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 946.363557] env[68233]: value = "task-2782661" [ 946.363557] env[68233]: _type = "Task" [ 946.363557] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.371728] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782661, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.540141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.540285] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Instance network_info: |[{"id": "aa886750-c433-4287-826b-2f74ab52f0d0", "address": "fa:16:3e:ee:98:21", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa886750-c4", "ovs_interfaceid": "aa886750-c433-4287-826b-2f74ab52f0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 946.540710] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:98:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'aa886750-c433-4287-826b-2f74ab52f0d0', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 946.548366] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.548654] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 946.548904] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fb59b2f-656e-4769-9fa5-e6c692d9f63d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.570183] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 946.570183] env[68233]: value = "task-2782662" [ 946.570183] env[68233]: _type = "Task" [ 946.570183] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.579596] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782662, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.671170] env[68233]: INFO nova.compute.manager [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Took 23.34 seconds to build instance. [ 946.759663] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.809427] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1aeefc6e-55b1-42bd-9594-c1664c547a79 tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "6ceb7d2d-143a-464a-aca5-6b6838630bb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.786s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.878493] env[68233]: DEBUG nova.compute.manager [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Received event network-changed-aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 946.878701] env[68233]: DEBUG nova.compute.manager [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Refreshing instance network info cache due to event network-changed-aa886750-c433-4287-826b-2f74ab52f0d0. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 946.878912] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] Acquiring lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.879073] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] Acquired lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.879238] env[68233]: DEBUG nova.network.neutron [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Refreshing network info cache for port aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 946.889595] env[68233]: DEBUG oslo_vmware.api [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.363817} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.890392] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.890577] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.890780] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.890988] env[68233]: INFO nova.compute.manager [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Took 1.14 seconds to destroy the instance on the hypervisor. [ 946.891258] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.891541] env[68233]: DEBUG nova.compute.manager [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.891583] env[68233]: DEBUG nova.network.neutron [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.084167] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782662, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.119841] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.120177] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.120407] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.120751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.120794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.123068] env[68233]: INFO nova.compute.manager [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Terminating instance [ 947.177086] env[68233]: DEBUG oslo_concurrency.lockutils [None req-96a8d717-a2e5-46d8-8b5b-f76eb2df934f tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472" 
"released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.857s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.205527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f6883c-049c-4de8-a2f3-9407a49dd0ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.216110] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebcf9b5-e6a8-4733-a166-579d045e5c04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.248865] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afd57c5-6558-4f86-b9c0-8e11ef1dc9fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.260569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ace0aa-a559-4f60-90d6-65b6dd0d5981 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.277650] env[68233]: DEBUG nova.compute.provider_tree [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 947.581910] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782662, 'name': CreateVM_Task, 'duration_secs': 0.515759} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.582214] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 947.583136] env[68233]: DEBUG nova.network.neutron [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Updated VIF entry in instance network info cache for port aa886750-c433-4287-826b-2f74ab52f0d0. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 947.583462] env[68233]: DEBUG nova.network.neutron [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Updating instance_info_cache with network_info: [{"id": "aa886750-c433-4287-826b-2f74ab52f0d0", "address": "fa:16:3e:ee:98:21", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa886750-c4", "ovs_interfaceid": "aa886750-c433-4287-826b-2f74ab52f0d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.584979] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.586104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 947.586453] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 947.587285] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-46639561-25d1-46c4-b617-f14336fc2a8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.592709] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 947.592709] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521d207f-acf2-e962-6934-11b23e2621ec" [ 947.592709] env[68233]: _type = "Task" [ 947.592709] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.601266] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521d207f-acf2-e962-6934-11b23e2621ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.637022] env[68233]: DEBUG nova.compute.manager [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 947.637022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 947.637022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fce6542-8c25-479a-ad34-44b15667f60a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.647897] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 947.647897] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37e9311e-5822-4341-90bc-680da7c9545f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.657440] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 947.657440] env[68233]: value = "task-2782663" [ 947.657440] env[68233]: _type = "Task" [ 947.657440] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.669024] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782663, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.720210] env[68233]: DEBUG nova.network.neutron [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.755028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "0b1065c2-7923-4dc4-a64f-be72a7994472" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.757919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.757919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.757919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.757919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.758232] env[68233]: INFO nova.compute.manager [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Terminating instance [ 947.800570] env[68233]: ERROR nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [req-23ddd11a-c098-4896-a585-e28baefc8b6c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-23ddd11a-c098-4896-a585-e28baefc8b6c"}]} [ 947.817012] env[68233]: DEBUG nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 947.831415] env[68233]: DEBUG nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 947.831415] env[68233]: DEBUG nova.compute.provider_tree [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 947.844165] env[68233]: DEBUG nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 947.863067] env[68233]: DEBUG nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 948.021126] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 948.021361] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559444', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'name': 'volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '13972b73-8bae-4a2a-a987-b6177381e7c8', 'attached_at': '', 'detached_at': '', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'serial': '966710f3-d7e3-4a95-bd4c-e592a39ff63d'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 948.022400] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d65408-50af-47db-b0a7-0683df758009 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.044721] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4171ab-bed5-46ff-b094-a753c1429762 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.047378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.047671] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.047923] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.048145] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.048320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 
tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.050555] env[68233]: INFO nova.compute.manager [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Terminating instance [ 948.075885] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d/volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 948.079564] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-164ec670-d29c-46db-801f-c49e2ff84631 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.093389] env[68233]: DEBUG oslo_concurrency.lockutils [req-bb7bfc8f-a6ad-4811-8344-e28a85d3682f req-2757c9e2-8acb-402f-82ff-a36cc0ba1517 service nova] Releasing lock "refresh_cache-7831d420-5a0a-4901-b7fe-95307b4b61f0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.099822] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 948.099822] env[68233]: value = "task-2782664" [ 948.099822] env[68233]: _type = "Task" [ 948.099822] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.103778] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521d207f-acf2-e962-6934-11b23e2621ec, 'name': SearchDatastore_Task, 'duration_secs': 0.035351} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.109016] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 948.109255] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 948.109515] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.109687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.109889] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 948.110395] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-933763e8-7538-43e5-8474-0e4b10d06650 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.118331] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782664, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.132991] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 948.133235] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 948.136767] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d42e606e-1ca8-4a48-aa40-7615a626750b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.143471] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 948.143471] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc3d0b-d19c-bcf1-61e0-486eb0603876" [ 948.143471] env[68233]: _type = "Task" [ 948.143471] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.154402] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc3d0b-d19c-bcf1-61e0-486eb0603876, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.167836] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782663, 'name': PowerOffVM_Task, 'duration_secs': 0.476097} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.170673] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.170871] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.171348] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7c28875-ff94-457d-9d86-cce01eedec63 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.222996] env[68233]: INFO nova.compute.manager [-] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Took 1.33 seconds to deallocate network for instance. 
[ 948.233584] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc14594-1a3c-4426-bf35-f38d5aa656e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.243406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3751e8e5-70dd-40f2-95e8-37c66628979b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.252386] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.253264] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.253264] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleting the datastore file [datastore2] c6a358b7-0e6a-43bb-a171-5e6175f947bd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.253264] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ea08c69-b05f-4aad-8a98-b6c472477e4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.280837] env[68233]: DEBUG nova.compute.manager [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.281197] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.283627] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1d8bf3-2d80-4116-af1a-c70fa46bec40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.287266] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a33885b-01e4-4efa-9993-8f24c8056789 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.291823] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for the task: (returnval){ [ 948.291823] env[68233]: value = "task-2782666" [ 948.291823] env[68233]: _type = "Task" [ 948.291823] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.300467] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.302297] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c61036d-21fe-49d8-b791-b01fbab174e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.306728] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf71b493-090a-4cf6-8c02-0e0d57642d84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.311805] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782666, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.326057] env[68233]: DEBUG nova.compute.provider_tree [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.328886] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 948.328886] env[68233]: value = "task-2782667" [ 948.328886] env[68233]: _type = "Task" [ 948.328886] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.339128] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.577421] env[68233]: DEBUG nova.compute.manager [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 948.577656] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.578570] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b778521-3e89-4392-b192-577b32da136a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.587203] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 948.587427] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13ccf95e-4f82-4505-bb60-64d6021ef948 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.593400] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 948.593400] env[68233]: value = "task-2782668" [ 948.593400] env[68233]: _type = "Task" [ 948.593400] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.601125] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782668, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.613075] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782664, 'name': ReconfigVM_Task, 'duration_secs': 0.464981} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.613362] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfigured VM instance instance-0000002f to attach disk [datastore2] volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d/volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.618523] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8eda5ec0-2519-45f5-b412-68e992520c3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.634991] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 948.634991] env[68233]: value = "task-2782669" [ 948.634991] env[68233]: _type = "Task" [ 948.634991] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.645871] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782669, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.655186] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc3d0b-d19c-bcf1-61e0-486eb0603876, 'name': SearchDatastore_Task, 'duration_secs': 0.011326} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.656016] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bbba3d4-ee03-40b7-897c-9511403dd91d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.662605] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 948.662605] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52728fbc-4147-63a2-d692-45c386a27516" [ 948.662605] env[68233]: _type = "Task" [ 948.662605] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.673036] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52728fbc-4147-63a2-d692-45c386a27516, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.729435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.804197] env[68233]: DEBUG oslo_vmware.api [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Task: {'id': task-2782666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146476} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.804474] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 948.804668] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 948.804857] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 948.805319] env[68233]: INFO nova.compute.manager [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Took 1.17 seconds to destroy the instance on the hypervisor. [ 948.805438] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 948.805725] env[68233]: DEBUG nova.compute.manager [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 948.805866] env[68233]: DEBUG nova.network.neutron [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 948.849378] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782667, 'name': PowerOffVM_Task, 'duration_secs': 0.212782} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.849706] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 948.850091] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.850262] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-23051e2a-970d-4a1c-93c3-b011880cc222 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.870252] env[68233]: DEBUG nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 105 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 948.870252] env[68233]: DEBUG nova.compute.provider_tree [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 105 to 106 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 948.870252] env[68233]: DEBUG nova.compute.provider_tree [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 948.923723] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.923961] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 
0b1065c2-7923-4dc4-a64f-be72a7994472] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.924168] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 0b1065c2-7923-4dc4-a64f-be72a7994472 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.924425] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a88b37b2-6328-467f-be60-3943b6473246 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.932302] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 948.932302] env[68233]: value = "task-2782671" [ 948.932302] env[68233]: _type = "Task" [ 948.932302] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.941231] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782671, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.059949] env[68233]: DEBUG nova.compute.manager [req-f9804d86-4591-48a6-82e9-da8543a07cab req-2db776c1-2d19-4297-baea-3179a6c299d6 service nova] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Received event network-vif-deleted-99d9f200-1c6c-4ab7-9482-aa557fad7711 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 949.102918] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782668, 'name': PowerOffVM_Task, 'duration_secs': 0.227787} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.103230] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 949.103400] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 949.103659] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a08612c8-0476-40f9-be0c-365e6a6f8c8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.146891] env[68233]: DEBUG oslo_vmware.api [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782669, 'name': ReconfigVM_Task, 'duration_secs': 0.203321} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.147293] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559444', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'name': 'volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '13972b73-8bae-4a2a-a987-b6177381e7c8', 'attached_at': '', 'detached_at': '', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'serial': '966710f3-d7e3-4a95-bd4c-e592a39ff63d'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 949.178440] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52728fbc-4147-63a2-d692-45c386a27516, 'name': SearchDatastore_Task, 'duration_secs': 0.018161} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.180237] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.180549] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 7831d420-5a0a-4901-b7fe-95307b4b61f0/7831d420-5a0a-4901-b7fe-95307b4b61f0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 949.181218] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27cad566-6501-45a6-8949-16eebafc5461 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.186422] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 949.186799] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 949.187123] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Deleting the datastore file [datastore2] 990e1a66-f2ab-4925-b1da-58cdc41a6315 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 949.188148] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d3608c39-2ede-4afd-87a5-20d4dbf3560e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.193196] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 949.193196] env[68233]: value = "task-2782673" [ 949.193196] env[68233]: _type = "Task" [ 949.193196] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.200993] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for the task: (returnval){ [ 949.200993] env[68233]: value = "task-2782674" [ 949.200993] env[68233]: _type = "Task" [ 949.200993] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.205100] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.210446] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782674, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.377173] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.096s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.379892] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.309s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.380223] env[68233]: DEBUG nova.objects.instance [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lazy-loading 'resources' on Instance uuid 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 949.400893] env[68233]: INFO nova.scheduler.client.report [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted allocations for instance 32e05800-e812-412a-b049-89178737cffd [ 949.443764] env[68233]: DEBUG oslo_vmware.api [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782671, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15822} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.443764] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.443764] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.443764] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.444090] env[68233]: INFO nova.compute.manager [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Took 1.16 seconds to destroy the instance on the hypervisor. [ 949.444127] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.444340] env[68233]: DEBUG nova.compute.manager [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.444416] env[68233]: DEBUG nova.network.neutron [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.681830] env[68233]: DEBUG nova.network.neutron [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.706136] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782673, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.714534] env[68233]: DEBUG oslo_vmware.api [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Task: {'id': task-2782674, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218123} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.714534] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.714978] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.714978] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.715117] env[68233]: INFO nova.compute.manager [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Took 1.14 seconds to destroy the instance on the hypervisor. [ 949.715374] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 949.715432] env[68233]: DEBUG nova.compute.manager [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 949.715529] env[68233]: DEBUG nova.network.neutron [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 949.911961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c76455e-f830-4fd1-a9c2-046152e63098 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "32e05800-e812-412a-b049-89178737cffd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.602s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.185035] env[68233]: INFO nova.compute.manager [-] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Took 1.38 seconds to deallocate network for instance. 
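The entries above trace the same teardown order for instance c6a358b7 that repeats below for 0b1065c2 and 990e1a66: power the VM off, unregister it from vCenter, delete its datastore directory, and only then deallocate its Neutron ports. A minimal sketch of that ordering, using hypothetical helper objects rather than the real vmops/ds_util/neutron code:

    # Hypothetical stand-ins for the calls the log shows: PowerOffVM_Task,
    # UnregisterVM, DeleteDatastoreFile_Task and deallocate_for_instance().
    def destroy_instance(vm, datastore_dir, network_api, instance_uuid):
        vm.power_off()            # PowerOffVM_Task, polled until complete
        vm.unregister()           # UnregisterVM
        datastore_dir.delete()    # DeleteDatastoreFile_Task on [datastore2] <uuid>
        # The hypervisor side is fully gone before networking is released,
        # which is why "Took N seconds to destroy the instance" precedes the
        # "Deallocating network for instance" entries above.
        network_api.deallocate_for_instance(instance_uuid)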
[ 950.191016] env[68233]: DEBUG nova.objects.instance [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'flavor' on Instance uuid 13972b73-8bae-4a2a-a987-b6177381e7c8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 950.205769] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782673, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.002287} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.210029] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 7831d420-5a0a-4901-b7fe-95307b4b61f0/7831d420-5a0a-4901-b7fe-95307b4b61f0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 950.210029] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 950.210029] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b548101-84f0-4f51-978f-0459322445e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.211196] env[68233]: DEBUG nova.network.neutron [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.221827] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 950.221827] env[68233]: value = "task-2782675" [ 950.221827] env[68233]: _type = "Task" [ 950.221827] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.232826] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782675, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.332015] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a277b2-82fd-44c6-90f9-18e681c82970 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.340650] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e19599b-2bee-46df-84cf-9259057da944 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.375446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8762764-7a94-4ae1-af8e-bf2e693b2c0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.384042] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f3d7e8-1bb5-4568-849f-0ade1a9c5930 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.398899] env[68233]: DEBUG nova.compute.provider_tree [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 950.697166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.699690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27ddfa2-452a-46dd-a927-f6f960519f68 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.717026] env[68233]: INFO nova.compute.manager [-] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Took 1.27 seconds to deallocate network for instance. [ 950.732401] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180294} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.732774] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 950.734525] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4dd7b7-75d7-4bc5-8d04-a47638d60932 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.757855] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 7831d420-5a0a-4901-b7fe-95307b4b61f0/7831d420-5a0a-4901-b7fe-95307b4b61f0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 950.758668] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cd8d897-70cb-4e14-ac64-af78fe6d920d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.780474] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 950.780474] env[68233]: value = "task-2782676" [ 950.780474] env[68233]: _type = "Task" [ 950.780474] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.789875] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782676, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.820017] env[68233]: DEBUG nova.network.neutron [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.903430] env[68233]: DEBUG nova.scheduler.client.report [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 950.928853] env[68233]: INFO nova.compute.manager [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Rescuing [ 950.928853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.930197] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.930197] env[68233]: DEBUG nova.network.neutron [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 951.093064] env[68233]: DEBUG nova.compute.manager [req-c83a7e72-edfb-4de1-ba42-2d25199eb9dd req-8750eff9-451c-403a-9dcd-f166e3e06826 service nova] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Received event network-vif-deleted-5bbc186d-7708-4c96-a2a7-454a8aae1e5c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 951.093291] env[68233]: DEBUG nova.compute.manager [req-c83a7e72-edfb-4de1-ba42-2d25199eb9dd req-8750eff9-451c-403a-9dcd-f166e3e06826 service nova] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Received event network-vif-deleted-4b5fdae0-2826-4f7e-89fe-1d02f303de0e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 951.093502] env[68233]: DEBUG nova.compute.manager [req-c83a7e72-edfb-4de1-ba42-2d25199eb9dd req-8750eff9-451c-403a-9dcd-f166e3e06826 service nova] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Received event network-vif-deleted-67e903f4-4173-44e7-a2c8-1d949ad0bd0d {{(pid=68233) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 951.224902] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.290678] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782676, 'name': ReconfigVM_Task, 'duration_secs': 0.27965} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.291345] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 7831d420-5a0a-4901-b7fe-95307b4b61f0/7831d420-5a0a-4901-b7fe-95307b4b61f0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 951.291605] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4393027b-7aa0-40ab-b1ab-91a75ccf67cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.299391] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 951.299391] env[68233]: value = "task-2782677" [ 951.299391] env[68233]: _type = "Task" [ 951.299391] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.307743] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782677, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.322408] env[68233]: INFO nova.compute.manager [-] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Took 1.61 seconds to deallocate network for instance. 
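Every vCenter call above that returns a Task object (PowerOffVM_Task, ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task, ...) is driven by the same loop: invoke the method, then poll the returned task until it reports success, which is what produces the interleaved "progress is N%." and "completed successfully" entries. A self-contained sketch of that loop, assuming a caller-supplied poll function instead of the real oslo_vmware session API:

    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        # poll_fn is a hypothetical stand-in for one polling round trip; it is
        # assumed to return a dict such as {'state': 'running', 'progress': 40},
        # {'state': 'success', 'duration_secs': 0.28} or
        # {'state': 'error', 'message': '...'}.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info['state'] == 'success':
                return info                          # "completed successfully"
            if info['state'] == 'error':
                raise RuntimeError(info.get('message', 'vCenter task failed'))
            time.sleep(interval)                     # "... progress is N%."
        raise TimeoutError('task did not complete within %.0f seconds' % timeout)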
[ 951.409074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.029s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.411581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.304s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.413517] env[68233]: INFO nova.compute.claims [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 951.435298] env[68233]: INFO nova.scheduler.client.report [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Deleted allocations for instance 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9 [ 951.734525] env[68233]: DEBUG nova.network.neutron [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.809911] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782677, 'name': Rename_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.829431] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.942538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5d8a742c-fac2-4782-b35a-89575160d010 tempest-ServerRescueTestJSON-1299350510 tempest-ServerRescueTestJSON-1299350510-project-member] Lock "16f20fab-ccf8-4a47-ae7d-9ab55932c5c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.344s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.237355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.311418] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782677, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.785568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ca86a2-9de9-4797-ae74-5beedb15e9ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.794658] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6ffa40-b8b5-4625-a6ab-dc95b883e103 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.831777] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a25ef75-3791-47d9-8944-1d02a8745893 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.837319] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782677, 'name': Rename_Task, 'duration_secs': 1.153703} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.837971] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.838220] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ad90c3f-b299-469c-9737-5dd3ec702d07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.843655] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdd92b8-bb15-4a98-a533-bfa7235c3b72 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.848739] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 952.848739] env[68233]: value = "task-2782678" [ 952.848739] env[68233]: _type = "Task" [ 952.848739] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.859763] env[68233]: DEBUG nova.compute.provider_tree [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 952.866620] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782678, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.359891] env[68233]: DEBUG oslo_vmware.api [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782678, 'name': PowerOnVM_Task, 'duration_secs': 0.497706} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.360324] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 953.360626] env[68233]: INFO nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Took 9.10 seconds to spawn the instance on the hypervisor. 
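Pulling the 7831d420 entries together, the spawn path runs: copy the cached image VMDK out of devstack-image-cache_base, extend the copy to the flavor's root-disk size (the logged 1048576 is consistent with a 1 GB root disk expressed in KiB), attach it with a ReconfigVM_Task, rename the VM, then power it on. A sketch of that ordering with hypothetical datastore/VM helpers; only the step order and task names are taken from the log:

    # Hypothetical helpers; the sequence mirrors the tasks traced above.
    def spawn_from_cached_image(ds, vm, instance_uuid, image_uuid, root_gb, vm_name):
        cached = f'devstack-image-cache_base/{image_uuid}/{image_uuid}.vmdk'
        target = f'{instance_uuid}/{instance_uuid}.vmdk'
        ds.copy_virtual_disk(cached, target)               # CopyVirtualDisk_Task
        ds.extend_virtual_disk(target, root_gb * 1048576)  # ExtendVirtualDisk_Task (KiB)
        vm.attach_disk(target, disk_type='sparse')         # ReconfigVM_Task
        vm.rename(vm_name)                                 # Rename_Task
        vm.power_on()                                      # PowerOnVM_Task, then
                                                           # "Took N seconds to spawn"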
[ 953.360892] env[68233]: DEBUG nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.361870] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f383ce52-772c-494c-a0b1-efeb22c925db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.365187] env[68233]: DEBUG nova.scheduler.client.report [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.792035] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.792454] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14dd8085-dfb7-41b6-a733-98e8fd26354a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.801698] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 953.801698] env[68233]: value = "task-2782679" [ 953.801698] env[68233]: _type = "Task" [ 953.801698] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.810492] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782679, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.870955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.871555] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 953.874466] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.167s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.874692] env[68233]: DEBUG nova.objects.instance [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lazy-loading 'resources' on Instance uuid 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 953.883332] env[68233]: INFO nova.compute.manager [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Took 27.63 seconds to build instance. [ 954.314448] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782679, 'name': PowerOffVM_Task, 'duration_secs': 0.487363} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.314706] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.315517] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18318f67-a9ed-4124-91c9-6fcf44d8c3de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.336762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1c2561-ffca-4217-b346-cd99ccd9f5b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.370200] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.370519] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ade39d89-82bd-4521-8e13-9e31d45a0ee3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.378064] env[68233]: DEBUG nova.compute.utils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 954.382064] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 954.382064] env[68233]: value = "task-2782680" [ 954.382064] env[68233]: _type = "Task" [ 954.382064] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.382326] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 954.382498] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 954.384758] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ad013730-ab8f-4133-9324-bc654e13d729 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.142s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.394448] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 954.394705] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.394904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.395068] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.395249] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.395484] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a360d24c-1205-4d7d-b63b-cf417f98d841 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.403251] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.403430] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.404170] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bac6f90-0b8b-44d7-9b0c-b5ada0e54371 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.409243] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 954.409243] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe32eb-97fe-2fd5-4ea8-001d154f7adc" [ 954.409243] env[68233]: _type = "Task" [ 954.409243] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.418883] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe32eb-97fe-2fd5-4ea8-001d154f7adc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.476841] env[68233]: DEBUG nova.policy [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b450a75286a9438081aa60c4b5cfeab3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9df7c30630584a2bb79e798dcc571850', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 954.660737] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adfad30-6f9e-462c-bacc-d2072cca971f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.673645] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e39824d-535e-4ec7-9844-83cfa3693b7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.712822] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b64c65d-06c6-4a4f-86dc-d0b7f154ab2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.724393] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00211713-62b8-45a4-957e-e85322470b06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.736338] env[68233]: 
DEBUG nova.compute.provider_tree [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.883268] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 954.920061] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fe32eb-97fe-2fd5-4ea8-001d154f7adc, 'name': SearchDatastore_Task, 'duration_secs': 0.010763} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.920869] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-198bc3bd-807b-4f17-b453-ffe7a649d7e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.927868] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 954.927868] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa4010-734b-7180-019a-4393f038a55d" [ 954.927868] env[68233]: _type = "Task" [ 954.927868] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.937521] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa4010-734b-7180-019a-4393f038a55d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.073848] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.074285] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.113888] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Successfully created port: be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 955.239799] env[68233]: DEBUG nova.scheduler.client.report [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 955.439160] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa4010-734b-7180-019a-4393f038a55d, 'name': SearchDatastore_Task, 'duration_secs': 0.011357} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.439444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "[datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.439723] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. {{(pid=68233) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 955.439989] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ab09458-a147-4346-a027-4effa2c0f3c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.447737] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 955.447737] env[68233]: value = "task-2782681" [ 955.447737] env[68233]: _type = "Task" [ 955.447737] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.457075] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782681, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.578696] env[68233]: DEBUG nova.compute.utils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.744890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.870s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.748373] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 14.904s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.748808] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.748882] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 955.749178] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.151s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.751049] env[68233]: INFO nova.compute.claims [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.754761] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41eb6f6c-df2e-4962-ab41-50c79198c400 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.764398] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56535fd-d533-4ea2-aa83-9fecc156d161 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.770971] env[68233]: INFO nova.scheduler.client.report [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Deleted allocations for instance 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98 [ 955.783402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c2234a22-bb72-4888-b025-ba7cdc65e06c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.791857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764e229d-2eda-4d86-9cca-43cad910a3b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.824634] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177772MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 955.824832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.894816] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 955.924871] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 955.925186] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 955.925349] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 955.925539] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 955.925681] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 955.925867] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 955.926099] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 955.926265] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 955.926439] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 955.926603] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 955.926774] env[68233]: DEBUG nova.virt.hardware [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 955.927787] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff29a5ef-4ba6-43d6-b972-8103bd5f95f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.939480] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc3818b-fa98-49ab-b11a-a93616996c7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.964064] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782681, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.080194] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.157263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.157548] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.157807] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.158028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.158207] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.160984] env[68233]: INFO nova.compute.manager [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Terminating instance [ 956.288985] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a9de61a1-450d-4b94-97cc-c62b7e59e958 tempest-ListImageFiltersTestJSON-487029796 tempest-ListImageFiltersTestJSON-487029796-project-member] Lock "4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
19.138s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.465416] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782681, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540119} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.465684] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk. [ 956.466503] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8b5e3b-cf7c-4d94-bd7c-3a9d731b6987 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.493883] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.495573] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ea6b8d2-56a6-47cd-95b2-36fcd76bd9e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.513101] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 956.513101] env[68233]: value = "task-2782682" [ 956.513101] env[68233]: _type = "Task" [ 956.513101] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.520783] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782682, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.664925] env[68233]: DEBUG nova.compute.manager [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 956.665168] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.666420] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6abc091-b376-4bf8-bef5-287a3bab03a1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.675328] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.675592] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f3fafa8e-584d-47f6-9019-bbb0f3a35f19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.682455] env[68233]: DEBUG oslo_vmware.api [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 956.682455] env[68233]: value = "task-2782683" [ 956.682455] env[68233]: _type = "Task" [ 956.682455] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.690697] env[68233]: DEBUG oslo_vmware.api [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782683, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.777475] env[68233]: DEBUG nova.compute.manager [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 956.777716] env[68233]: DEBUG oslo_concurrency.lockutils [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.777937] env[68233]: DEBUG oslo_concurrency.lockutils [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.778348] env[68233]: DEBUG oslo_concurrency.lockutils [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.778427] env[68233]: DEBUG nova.compute.manager [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] No waiting events found dispatching network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.778584] env[68233]: WARNING nova.compute.manager [req-0555ce16-738d-4c21-8a69-b2ce58eaa619 req-d85844f3-007c-430b-bf15-e0deb8fa7e5f service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received unexpected event network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 for instance with vm_state building and task_state spawning. [ 956.826067] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Successfully updated port: be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.025823] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782682, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.084315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab514073-78c2-490d-94c2-24a00e32d1c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.094407] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750c32b6-8d95-48be-9080-c81f35aca46a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.131572] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a378b2ac-f248-4b43-b107-2a01408bd4bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.140447] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe23750-545b-4d17-9df2-f00a4ef3ed05 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.157561] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.157836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.158143] env[68233]: INFO nova.compute.manager [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Attaching volume b040816d-86e9-41f4-80a3-eb4938cd8774 to /dev/sdb [ 957.160017] env[68233]: DEBUG nova.compute.provider_tree [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 957.191695] env[68233]: DEBUG oslo_vmware.api [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782683, 'name': PowerOffVM_Task, 'duration_secs': 0.235459} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.191982] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.192185] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.192436] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5be6c113-de90-4119-b75a-03560aeb9c4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.198795] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd80c4a-84d3-4f9a-a8d1-75366acaff08 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.205703] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ea6443-9106-4c5a-8d15-949c54897c7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.218882] env[68233]: DEBUG nova.virt.block_device [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Updating existing volume attachment record: 6f32cf42-8d6e-4781-80e8-ee3d6a692f5b {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 957.261616] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.261869] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.262068] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Deleting the datastore file [datastore1] d0d6eed0-db5b-4371-8f03-b3415fd833f0 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.262453] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f88794a4-f9aa-440e-bb52-fd2da5d44d4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.268287] env[68233]: DEBUG oslo_vmware.api [None 
req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for the task: (returnval){ [ 957.268287] env[68233]: value = "task-2782685" [ 957.268287] env[68233]: _type = "Task" [ 957.268287] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.276422] env[68233]: DEBUG oslo_vmware.api [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782685, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.328298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.328447] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.328595] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 957.527408] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782682, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.664914] env[68233]: DEBUG nova.scheduler.client.report [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.778794] env[68233]: DEBUG oslo_vmware.api [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Task: {'id': task-2782685, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340987} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.778905] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.779157] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.779352] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.779574] env[68233]: INFO nova.compute.manager [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Took 1.11 seconds to destroy the instance on the hypervisor. [ 957.779860] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.780653] env[68233]: DEBUG nova.compute.manager [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.780653] env[68233]: DEBUG nova.network.neutron [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 957.911713] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 958.027246] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782682, 'name': ReconfigVM_Task, 'duration_secs': 1.062586} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.027701] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8/da133fda-e1e2-42a1-a7e0-b8b1426a8490-rescue.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.028883] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61fdfdcf-975c-416f-bdd5-fd3555aca637 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.059675] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3c54441-ecba-4cb6-86ec-9f66a9b38f0f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.075461] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 958.075461] env[68233]: value = "task-2782689" [ 958.075461] env[68233]: _type = "Task" [ 958.075461] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.083997] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782689, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.171486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.172084] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 958.174956] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.574s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.176948] env[68233]: INFO nova.compute.claims [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.239589] env[68233]: DEBUG nova.network.neutron [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.588437] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782689, 'name': ReconfigVM_Task, 'duration_secs': 0.275046} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.588721] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.588979] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e966a305-50ba-4bc4-aaee-d3acb15b9374 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.595955] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 958.595955] env[68233]: value = "task-2782690" [ 958.595955] env[68233]: _type = "Task" [ 958.595955] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.603950] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.682509] env[68233]: DEBUG nova.compute.utils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 958.688517] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 958.688791] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 958.741777] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.742151] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance network_info: |[{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 958.742793] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:b9:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be512a20-e94b-4c51-8658-24c6e1feba94', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.754787] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating folder: Project (9df7c30630584a2bb79e798dcc571850). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.755149] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a44b917a-86d7-4e88-9070-16872f243c16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.765777] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created folder: Project (9df7c30630584a2bb79e798dcc571850) in parent group-v559223. [ 958.765985] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating folder: Instances. Parent ref: group-v559448. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 958.766237] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dcd5ac83-fa46-4f53-8894-3e1b2b097909 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.774403] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created folder: Instances in parent group-v559448. [ 958.774647] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 958.774839] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 958.775054] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ad12aeb-292d-425e-816c-0fb39d6295f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.793153] env[68233]: DEBUG nova.policy [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da4cb00bd4c3405c88d8616b66b71e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14d2a0ead80a4efba8420023c31f8f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 958.799201] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.799201] env[68233]: value = "task-2782693" [ 958.799201] env[68233]: _type = "Task" [ 958.799201] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.807194] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782693, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.915666] env[68233]: DEBUG nova.compute.manager [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 958.915666] env[68233]: DEBUG nova.compute.manager [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing instance network info cache due to event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 958.915960] env[68233]: DEBUG oslo_concurrency.lockutils [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.915960] env[68233]: DEBUG oslo_concurrency.lockutils [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.916102] env[68233]: DEBUG nova.network.neutron [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.106874] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782690, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.189682] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 959.206926] env[68233]: DEBUG nova.network.neutron [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.314047] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782693, 'name': CreateVM_Task, 'duration_secs': 0.392728} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.321241] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 959.321241] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.321241] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.324865] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 959.324865] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e26d619f-8d9c-4e63-9571-b769f3d8ca5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.327288] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 959.327288] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dd41d5-9426-ba8c-60e0-aadf143b5181" [ 959.327288] env[68233]: _type = "Task" [ 959.327288] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.338523] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dd41d5-9426-ba8c-60e0-aadf143b5181, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.366659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.372096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.372096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.372096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.372096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.372096] env[68233]: INFO nova.compute.manager [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Terminating instance [ 959.416640] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Successfully created port: 33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 959.608094] env[68233]: DEBUG oslo_vmware.api [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782690, 'name': PowerOnVM_Task, 'duration_secs': 0.570402} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.608094] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.610720] env[68233]: DEBUG nova.compute.manager [None req-cee74611-0456-4a93-9b30-65cfcf61ecb0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 959.612926] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4582729-7ef2-4ed8-8150-a8168a438226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.647292] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcb04f7-2bcb-4ee7-873d-4a5aababc6b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.658779] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83106a65-14c0-4cb2-83d7-032be0a15fe1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.694624] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1f28dc-953c-425e-8969-222b084517c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.705354] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6462369-f645-47ca-902e-a2139e7261d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.709690] env[68233]: INFO nova.compute.manager [-] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Took 1.93 seconds to deallocate network for instance. [ 959.722699] env[68233]: DEBUG nova.compute.provider_tree [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.838664] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dd41d5-9426-ba8c-60e0-aadf143b5181, 'name': SearchDatastore_Task, 'duration_secs': 0.015087} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.838664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.838904] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.838904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.839061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.839240] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 959.839502] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6e90792-90bb-4ec1-a3f1-e49d429e7442 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.848016] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 959.848188] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 959.848878] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e1a934b-7a72-43f1-96aa-cd679f21fbf9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.854011] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 959.854011] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d9cf5-692f-f54b-ea11-667f1b567d9f" [ 959.854011] env[68233]: _type = "Task" [ 959.854011] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.861455] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d9cf5-692f-f54b-ea11-667f1b567d9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.876396] env[68233]: DEBUG nova.compute.manager [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 959.876621] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 959.877445] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599b773c-4dba-4dfe-b46e-e35ad2a4009d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.885061] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.885241] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-270d38f8-48ed-4489-b8ea-bd6266587a48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.892895] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 959.892895] env[68233]: value = "task-2782695" [ 959.892895] env[68233]: _type = "Task" [ 959.892895] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.893959] env[68233]: DEBUG nova.network.neutron [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updated VIF entry in instance network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.894307] env[68233]: DEBUG nova.network.neutron [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.904799] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782695, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.213019] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 960.227824] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.229464] env[68233]: DEBUG nova.scheduler.client.report [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 960.254969] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 960.255481] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 960.255766] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 960.256108] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 960.256749] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 960.256749] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 960.256933] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 960.257092] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 960.257370] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 960.257643] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 960.257902] env[68233]: DEBUG nova.virt.hardware [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 960.259447] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad362aa-3d1d-4e3f-8f09-b56c012ab595 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.268888] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7267873e-0819-401d-af3c-4a40faace7f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.365419] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529d9cf5-692f-f54b-ea11-667f1b567d9f, 'name': SearchDatastore_Task, 'duration_secs': 0.014502} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.366240] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3360b8eb-8c7a-4031-8cc7-888894ae5c78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.371740] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 960.371740] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cfa1f5-94ce-594c-7d59-c1fec753e267" [ 960.371740] env[68233]: _type = "Task" [ 960.371740] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.379622] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cfa1f5-94ce-594c-7d59-c1fec753e267, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.402084] env[68233]: DEBUG oslo_concurrency.lockutils [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.402354] env[68233]: DEBUG nova.compute.manager [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Received event network-vif-deleted-4969d072-296f-454b-9621-58f95b90a8dd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 960.402563] env[68233]: INFO nova.compute.manager [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Neutron deleted interface 4969d072-296f-454b-9621-58f95b90a8dd; detaching it from the instance and deleting it from the info cache [ 960.403131] env[68233]: DEBUG nova.network.neutron [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.411564] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782695, 'name': PowerOffVM_Task, 'duration_secs': 0.410765} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.411564] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 960.411564] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 960.411564] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f4e673cf-21e4-4cc6-81e2-5045e79cd32d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.486575] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 960.486847] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 960.487410] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleting the datastore file [datastore2] 48270554-abe4-4f72-b8b9-5f2de6a9ed26 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 960.487410] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c416acd-2cc1-45b9-9a03-461f222ef591 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.493327] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for the task: (returnval){ [ 960.493327] env[68233]: value = "task-2782697" [ 960.493327] env[68233]: _type = "Task" [ 960.493327] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.501401] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.734249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.734707] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 960.737795] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.978s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.739282] env[68233]: INFO nova.compute.claims [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 960.777730] env[68233]: INFO nova.compute.manager [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Unrescuing [ 960.778829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.779080] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.779284] env[68233]: DEBUG nova.network.neutron [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 960.882196] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cfa1f5-94ce-594c-7d59-c1fec753e267, 'name': SearchDatastore_Task, 'duration_secs': 0.029745} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.882458] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.882715] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.882973] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70439801-1f63-499f-9e76-506889e5cf6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.888728] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 960.888728] env[68233]: value = "task-2782698" [ 960.888728] env[68233]: _type = "Task" [ 960.888728] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.896247] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.905966] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5952301c-369c-43be-bd42-305e142b7d7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.913878] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d245e36-c383-4706-b655-04d1b4390238 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.948825] env[68233]: DEBUG nova.compute.manager [req-8213206c-f097-4007-95ed-6e45447ecb04 req-ae5c98a5-a66f-4259-9e39-70ac4ee6ada3 service nova] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Detach interface failed, port_id=4969d072-296f-454b-9621-58f95b90a8dd, reason: Instance d0d6eed0-db5b-4371-8f03-b3415fd833f0 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 961.003296] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.246025] env[68233]: DEBUG nova.compute.utils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 961.248458] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 961.249037] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 961.336830] env[68233]: DEBUG nova.policy [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34ad3361226b4d35904538bd7a916ad3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a0b7eed382649819b33a9370cd51228', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 961.361187] env[68233]: DEBUG nova.compute.manager [req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Received event network-vif-plugged-33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 961.361187] env[68233]: DEBUG oslo_concurrency.lockutils [req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.363173] env[68233]: DEBUG oslo_concurrency.lockutils [req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] Lock "9c0e581d-5856-470f-a737-301649d701e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.363556] env[68233]: DEBUG oslo_concurrency.lockutils [req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] Lock "9c0e581d-5856-470f-a737-301649d701e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.363861] env[68233]: DEBUG nova.compute.manager 
[req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] No waiting events found dispatching network-vif-plugged-33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 961.364160] env[68233]: WARNING nova.compute.manager [req-87325fd8-5cee-468b-b789-be6f2bc68d29 req-9304f8f4-a6c1-4b68-8c8b-de2dceae3df7 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Received unexpected event network-vif-plugged-33a2fb19-ac15-4669-ba90-af5e70070de2 for instance with vm_state building and task_state spawning. [ 961.400941] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782698, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.494633] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Successfully updated port: 33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.512534] env[68233]: DEBUG oslo_vmware.api [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Task: {'id': task-2782697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.641413} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.513694] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.513938] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.514215] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.514481] env[68233]: INFO nova.compute.manager [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Took 1.64 seconds to destroy the instance on the hypervisor. 
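The surrounding records all follow the same oslo.vmware task pattern: a vSphere method such as PowerOnVM_Task, CreateVM_Task, PowerOffVM_Task, CopyVirtualDisk_Task or DeleteDatastoreFile_Task is invoked through the session, vCenter returns a Task managed object, and wait_for_task() polls it until completion (the recurring "Waiting for the task ... progress is N% ... completed successfully" lines come from _poll_task in oslo_vmware/api.py). A minimal sketch of that call pattern outside Nova follows; vc_host, vc_user, vc_password and the instance UUID below are placeholders for illustration, not values taken from this trace:

    # Sketch of the oslo.vmware session/task pattern seen in this log.
    # Connection details and the UUID are placeholders, not log values.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc_host', 'vc_user', 'vc_password',  # placeholder vCenter endpoint/credentials
        api_retry_count=10,                   # retry transient API faults
        task_poll_interval=0.5)               # seconds between task polls

    # Locate the VM by instance UUID, as the SearchIndex.FindAllByUuid
    # invocations earlier in this trace do.
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='00000000-0000-0000-0000-000000000000',
        vmSearch=True, instanceUuid=True)
    if not vm_refs:
        raise RuntimeError('VM not found')
    vm_ref = vm_refs[0]

    # Start a vCenter task and block until it finishes; wait_for_task()
    # polls the task and raises if it ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

Nova's vmwareapi driver wraps this same session in its own helpers (vm_util.power_on_instance, power_off_instance, copy_virtual_disk and similar calls in the records above), which is why identical wait_for_task/_poll_task lines from oslo_vmware/api.py recur for every task in this trace.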
[ 961.514790] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.516811] env[68233]: DEBUG nova.compute.manager [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.516945] env[68233]: DEBUG nova.network.neutron [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.545363] env[68233]: DEBUG nova.network.neutron [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.749247] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 961.769742] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 961.769993] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559447', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'name': 'volume-b040816d-86e9-41f4-80a3-eb4938cd8774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7831d420-5a0a-4901-b7fe-95307b4b61f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'serial': 'b040816d-86e9-41f4-80a3-eb4938cd8774'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 961.770898] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49778f6c-94e9-473c-974d-9b71a920875f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.801223] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb47e604-b648-429a-af29-74a45e00d655 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.829601] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] volume-b040816d-86e9-41f4-80a3-eb4938cd8774/volume-b040816d-86e9-41f4-80a3-eb4938cd8774.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.833199] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-383aa1e4-cc41-4b7b-b7b3-5098b1470723 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.851744] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 961.851744] env[68233]: value = "task-2782699" [ 961.851744] env[68233]: _type = "Task" [ 961.851744] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.859549] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782699, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.899849] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651301} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.900120] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 961.900335] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 961.900580] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d74cefcd-f925-4230-bbc7-1ea73bc2915c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.906711] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 961.906711] env[68233]: value = "task-2782700" [ 961.906711] env[68233]: _type = "Task" [ 961.906711] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.915299] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782700, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.996913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.997077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.997231] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.050656] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.051385] env[68233]: DEBUG nova.objects.instance [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'flavor' on Instance uuid 13972b73-8bae-4a2a-a987-b6177381e7c8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.106018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931b96be-eb31-4ef0-b3c0-54b7d0785e27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.114178] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddaa424-b334-4887-9e36-2b4b7ea4db78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.148994] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b80260f-99f0-4aa5-90b3-a6831a3e211c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.158423] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03532975-8150-46e7-b56b-90c958a68677 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.175267] env[68233]: DEBUG nova.compute.provider_tree [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 962.180101] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c 
tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Successfully created port: ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 962.261432] env[68233]: INFO nova.virt.block_device [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Booting with volume 6b0ac6e3-b417-4371-87f2-0141bc97e81c at /dev/sda [ 962.299789] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17dab0de-867b-44c4-9208-867796b1e8bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.310054] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7c08ad-4b42-4200-a1d3-556b089ae2f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.347945] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c775597-6843-4e7e-b73c-84f893adab00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.361406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0d2ad3-2423-407f-9d42-112df068ee63 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.377622] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782699, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.399120] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f74eff78-92b8-4424-b44d-c08895dc29eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.406330] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4f8c53-82ca-4e88-a278-eeb2b3ad2efb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.417461] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06835} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.417724] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 962.418486] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860a2ee9-444f-43e4-84c8-b6cae3118ebc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.425724] env[68233]: DEBUG nova.virt.block_device [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating existing volume attachment record: 6584f577-cef8-427a-89e5-beb58230ca10 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 962.450684] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 962.452242] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b20b2672-4f6a-4777-be34-619657720f11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.471911] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 962.471911] env[68233]: value = "task-2782701" [ 962.471911] env[68233]: _type = "Task" [ 962.471911] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.480248] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782701, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.544418] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.557990] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a2e387-9a17-4d71-b4f4-2eb4c8bb500f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.595911] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 962.599884] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d056286-9704-44a1-99f2-2320b7e66ee8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.607794] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 962.607794] env[68233]: value = "task-2782702" [ 962.607794] env[68233]: _type = "Task" [ 962.607794] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.617011] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782702, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.681162] env[68233]: DEBUG nova.scheduler.client.report [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 962.708859] env[68233]: DEBUG nova.network.neutron [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Updating instance_info_cache with network_info: [{"id": "33a2fb19-ac15-4669-ba90-af5e70070de2", "address": "fa:16:3e:1e:dd:28", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33a2fb19-ac", "ovs_interfaceid": "33a2fb19-ac15-4669-ba90-af5e70070de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.863626] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782699, 'name': ReconfigVM_Task, 'duration_secs': 0.872197} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.863626] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfigured VM instance instance-00000053 to attach disk [datastore2] volume-b040816d-86e9-41f4-80a3-eb4938cd8774/volume-b040816d-86e9-41f4-80a3-eb4938cd8774.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.868340] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da33c472-0e87-4ed2-9048-50f2e62d44a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.884663] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 962.884663] env[68233]: value = "task-2782703" [ 962.884663] env[68233]: _type = "Task" [ 962.884663] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.893130] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782703, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.983741] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782701, 'name': ReconfigVM_Task, 'duration_secs': 0.343344} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.984303] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.985575] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c1d4cd3-addb-4e87-8ef2-2627e871029f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.992713] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 962.992713] env[68233]: value = "task-2782704" [ 962.992713] env[68233]: _type = "Task" [ 962.992713] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.002283] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782704, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.070847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.071136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.071482] env[68233]: DEBUG nova.objects.instance [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 73ca71c0-34cd-4393-82ff-4b297d350209 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.072981] env[68233]: DEBUG nova.network.neutron [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.117782] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782702, 'name': PowerOffVM_Task} progress 
is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.188891] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.451s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.189456] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 963.192223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.463s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.192444] env[68233]: DEBUG nova.objects.instance [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'resources' on Instance uuid c5b42243-878f-4150-a5d3-63d69e474bd1 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.212029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.212029] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance network_info: |[{"id": "33a2fb19-ac15-4669-ba90-af5e70070de2", "address": "fa:16:3e:1e:dd:28", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33a2fb19-ac", "ovs_interfaceid": "33a2fb19-ac15-4669-ba90-af5e70070de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 963.212471] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:dd:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33a2fb19-ac15-4669-ba90-af5e70070de2', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 963.219813] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 963.220385] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 963.220591] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc50fb84-681a-42b7-8c96-53aa85c31962 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.242620] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 963.242620] env[68233]: value = "task-2782705" [ 963.242620] env[68233]: _type = "Task" [ 963.242620] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.251337] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782705, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.399037] env[68233]: DEBUG oslo_vmware.api [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782703, 'name': ReconfigVM_Task, 'duration_secs': 0.376283} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.399517] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559447', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'name': 'volume-b040816d-86e9-41f4-80a3-eb4938cd8774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7831d420-5a0a-4901-b7fe-95307b4b61f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'serial': 'b040816d-86e9-41f4-80a3-eb4938cd8774'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 963.503205] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782704, 'name': Rename_Task, 'duration_secs': 0.279841} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.503483] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 963.503727] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7c5bd5d-1a27-42b6-8d3e-f110dcdc3440 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.510836] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 963.510836] env[68233]: value = "task-2782706" [ 963.510836] env[68233]: _type = "Task" [ 963.510836] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.519241] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782706, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.529204] env[68233]: DEBUG nova.compute.manager [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Received event network-changed-33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 963.529424] env[68233]: DEBUG nova.compute.manager [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Refreshing instance network info cache due to event network-changed-33a2fb19-ac15-4669-ba90-af5e70070de2. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 963.529623] env[68233]: DEBUG oslo_concurrency.lockutils [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] Acquiring lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.530254] env[68233]: DEBUG oslo_concurrency.lockutils [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] Acquired lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.530254] env[68233]: DEBUG nova.network.neutron [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Refreshing network info cache for port 33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.577954] env[68233]: INFO nova.compute.manager [-] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Took 2.06 seconds to deallocate network for instance. [ 963.619325] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782702, 'name': PowerOffVM_Task, 'duration_secs': 0.546551} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.621792] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 963.627078] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfiguring VM instance instance-0000002f to detach disk 2002 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 963.627609] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc56fc14-7800-453c-a26a-72abd7e265f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.646985] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 963.646985] env[68233]: value = "task-2782707" [ 963.646985] env[68233]: _type = "Task" [ 963.646985] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.654775] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782707, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.672403] env[68233]: DEBUG nova.objects.instance [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 73ca71c0-34cd-4393-82ff-4b297d350209 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 963.696406] env[68233]: DEBUG nova.compute.utils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 963.700699] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 963.700699] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 963.741060] env[68233]: DEBUG nova.policy [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '839b33e7aa11482882403ddc2319583f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '853a057cfba3400ba05c89cb1d292f61', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 963.757411] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782705, 'name': CreateVM_Task, 'duration_secs': 0.407367} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.758083] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 963.758288] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.758447] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.758784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 963.759066] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dd22196-b561-4958-9544-8be400b32a24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.765465] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 963.765465] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529e0904-901d-91f6-c909-523e237f73f1" [ 963.765465] env[68233]: _type = "Task" [ 963.765465] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.773354] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529e0904-901d-91f6-c909-523e237f73f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.030333] env[68233]: DEBUG oslo_vmware.api [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782706, 'name': PowerOnVM_Task, 'duration_secs': 0.492833} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.031502] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Successfully updated port: ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 964.033636] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.033855] env[68233]: INFO nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Took 8.14 seconds to spawn the instance on the hypervisor. [ 964.035077] env[68233]: DEBUG nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.038191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06653b7d-3b55-4d66-831d-3bba4e2566a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.069040] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342da110-d7fa-4e86-982a-d5f9e58a09fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.080359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09fc680-d60f-4b06-bc4d-b5296db8e733 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.087775] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.118018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d33d55a-efa6-442e-b84b-54a79ab54b1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.127260] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04506dd-e9e7-4627-9c4d-220bd09c02b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.145955] env[68233]: DEBUG nova.compute.provider_tree [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for 
provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.156444] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782707, 'name': ReconfigVM_Task, 'duration_secs': 0.271314} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.156608] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfigured VM instance instance-0000002f to detach disk 2002 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 964.156801] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.157615] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac1b5007-13a7-426d-8974-d2ec4fcb35db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.166294] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 964.166294] env[68233]: value = "task-2782708" [ 964.166294] env[68233]: _type = "Task" [ 964.166294] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.173966] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782708, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.174480] env[68233]: DEBUG nova.objects.base [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<73ca71c0-34cd-4393-82ff-4b297d350209> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 964.174670] env[68233]: DEBUG nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 964.201414] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 964.258724] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Successfully created port: bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.275740] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529e0904-901d-91f6-c909-523e237f73f1, 'name': SearchDatastore_Task, 'duration_secs': 0.014299} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.276040] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.276269] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 964.276499] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.276643] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.276823] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 964.277085] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caafcc74-2066-4f0a-8a47-e7755888b89b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.281746] env[68233]: DEBUG nova.policy [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 964.284849] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 964.285052] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 964.285706] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49e1b5f6-2844-4a2e-9dc6-1bfe0f74f317 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.291323] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 964.291323] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9f8b7-4ea4-aeee-34f4-cfc402cfa863" [ 964.291323] env[68233]: _type = "Task" [ 964.291323] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.300387] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9f8b7-4ea4-aeee-34f4-cfc402cfa863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.462778] env[68233]: DEBUG nova.network.neutron [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Updated VIF entry in instance network info cache for port 33a2fb19-ac15-4669-ba90-af5e70070de2. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.463216] env[68233]: DEBUG nova.network.neutron [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Updating instance_info_cache with network_info: [{"id": "33a2fb19-ac15-4669-ba90-af5e70070de2", "address": "fa:16:3e:1e:dd:28", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap33a2fb19-ac", "ovs_interfaceid": "33a2fb19-ac15-4669-ba90-af5e70070de2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.464931] env[68233]: DEBUG nova.objects.instance [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'flavor' on Instance uuid 7831d420-5a0a-4901-b7fe-95307b4b61f0 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.537296] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.537454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquired lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 964.537610] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 964.539689] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 964.540460] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 964.541055] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 964.541055] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 964.541189] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 964.541293] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 964.541540] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 964.541753] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 964.541919] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 964.542105] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] 
Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 964.542264] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 964.542432] env[68233]: DEBUG nova.virt.hardware [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 964.543995] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b6e3d3-dd93-4753-9381-de1ad3a92d02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.553370] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19298f29-8c75-4896-977e-84c5de12f5ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.565192] env[68233]: INFO nova.compute.manager [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Took 26.47 seconds to build instance. [ 964.652778] env[68233]: DEBUG nova.scheduler.client.report [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.679080] env[68233]: DEBUG oslo_vmware.api [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782708, 'name': PowerOnVM_Task, 'duration_secs': 0.388136} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.679080] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.679080] env[68233]: DEBUG nova.compute.manager [None req-92258228-cfe9-496b-b599-fc049c31a1a0 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.679365] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3844558-863e-4e0c-a75f-444ae9c8bd8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.728465] env[68233]: DEBUG nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Successfully created port: 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 964.805026] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9f8b7-4ea4-aeee-34f4-cfc402cfa863, 'name': SearchDatastore_Task, 'duration_secs': 0.009898} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.805026] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-147d8714-314a-44a9-b022-3d5432baea37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.810587] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 964.810587] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52502a61-c349-db06-2511-4284eaddc1e5" [ 964.810587] env[68233]: _type = "Task" [ 964.810587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.819529] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52502a61-c349-db06-2511-4284eaddc1e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.969401] env[68233]: DEBUG oslo_concurrency.lockutils [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] Releasing lock "refresh_cache-9c0e581d-5856-470f-a737-301649d701e5" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.969401] env[68233]: DEBUG nova.compute.manager [req-0edada96-b03c-4790-9b02-26b31f3b7ab7 req-c4c68014-25cf-484f-be43-b7cdd7bdebc9 service nova] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Received event network-vif-deleted-087c1c8a-5f17-45b3-bcce-2013fb3783d5 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 964.970092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-93ffcf4f-2bd8-4beb-ac2c-08d5e573ced8 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.812s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.065859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ddf1c63-e8d6-453f-a8db-c48175b50491 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.985s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.083522] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 965.158097] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.163273] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.466s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.163554] env[68233]: DEBUG nova.objects.instance [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lazy-loading 'resources' on Instance uuid c6a358b7-0e6a-43bb-a171-5e6175f947bd {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.184252] env[68233]: INFO nova.scheduler.client.report [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted allocations for instance c5b42243-878f-4150-a5d3-63d69e474bd1 [ 965.214502] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 965.250390] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 965.250685] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 965.250886] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 965.251102] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 965.251901] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 965.251901] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 965.251901] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 965.251901] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 965.252129] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] 
Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 965.252236] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 965.255622] env[68233]: DEBUG nova.virt.hardware [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 965.255622] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3fb206-7e6c-4579-aa18-240c4b670ccb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.262564] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b1e8c7-36d4-412d-92b0-9a5635c9706b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.267211] env[68233]: DEBUG nova.network.neutron [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [{"id": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "address": "fa:16:3e:82:fa:cc", "network": {"id": "e943db9f-aba9-4d7d-8556-edf5debb5109", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-672616402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a0b7eed382649819b33a9370cd51228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce58be4a-6f", "ovs_interfaceid": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.324153] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52502a61-c349-db06-2511-4284eaddc1e5, 'name': SearchDatastore_Task, 'duration_secs': 0.035402} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.324513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.324876] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 965.325226] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e6579277-bcd1-438f-85bd-30fa19006f20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.332892] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 965.332892] env[68233]: value = "task-2782709" [ 965.332892] env[68233]: _type = "Task" [ 965.332892] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.343944] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782709, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.694867] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b6a0478a-8356-4c3e-b52e-c87df1dbd6e8 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "c5b42243-878f-4150-a5d3-63d69e474bd1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.457s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.762193] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Successfully updated port: bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.770974] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Releasing lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.771432] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance network_info: |[{"id": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "address": "fa:16:3e:82:fa:cc", "network": {"id": "e943db9f-aba9-4d7d-8556-edf5debb5109", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-672616402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a0b7eed382649819b33a9370cd51228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce58be4a-6f", "ovs_interfaceid": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 965.771835] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:fa:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5116f690-f825-4fee-8a47-42b073e716c5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 965.779762] env[68233]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Creating folder: Project (7a0b7eed382649819b33a9370cd51228). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.784617] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2461736b-5543-4637-ad5c-5c63e0104f43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.802243] env[68233]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 965.802439] env[68233]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68233) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 965.803077] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Folder already exists: Project (7a0b7eed382649819b33a9370cd51228). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 965.803295] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Creating folder: Instances. Parent ref: group-v559434. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 965.803881] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e20cd0a8-be66-44b6-afbe-28e8845b5963 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.817051] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Created folder: Instances in parent group-v559434. [ 965.817315] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.817507] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 965.817720] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1cbfb1d-e708-4056-af6a-24253d3daf44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.855915] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499594} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.857551] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 965.859639] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 965.859639] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 965.859639] env[68233]: value = "task-2782712" [ 965.859639] env[68233]: _type = "Task" [ 965.859639] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.859639] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ea1ba0a-8b94-42ab-9a78-0b002a5ea39d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.873075] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782712, 'name': CreateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.874446] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 965.874446] env[68233]: value = "task-2782713" [ 965.874446] env[68233]: _type = "Task" [ 965.874446] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.886761] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.002544] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.002853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.003092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.003283] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.003452] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.005707] env[68233]: INFO nova.compute.manager [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Terminating instance [ 966.026197] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd90e24-7bdf-4e47-a61f-744aae65087f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.034068] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce9cfe9-73f4-47d3-ac48-b38fb0f2350a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.064289] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e30b0c-a8fc-4cd6-bfc3-e32cd58d5645 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.071249] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0e6b1b-90b2-47af-a256-9d9bee18f120 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.084045] env[68233]: DEBUG nova.compute.provider_tree [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.264898] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.265148] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.265291] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 966.375335] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782712, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.393626] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082161} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.393626] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 966.393626] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c2e0f61-24c8-459e-a909-a8f04bddaf66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.427016] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 966.427016] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e21ed096-e55f-4e2c-8088-b7161a2aa038 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.444265] env[68233]: DEBUG nova.compute.manager [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received event network-vif-plugged-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 966.444477] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Acquiring lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.444682] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.444849] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.445016] env[68233]: DEBUG nova.compute.manager [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] No waiting events found dispatching network-vif-plugged-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 966.445186] env[68233]: WARNING nova.compute.manager 
[req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received unexpected event network-vif-plugged-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 for instance with vm_state building and task_state spawning. [ 966.445347] env[68233]: DEBUG nova.compute.manager [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 966.445497] env[68233]: DEBUG nova.compute.manager [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing instance network info cache due to event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 966.445676] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Acquiring lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.445810] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Acquired lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.446266] env[68233]: DEBUG nova.network.neutron [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing network info cache for port ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 966.453787] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 966.453787] env[68233]: value = "task-2782714" [ 966.453787] env[68233]: _type = "Task" [ 966.453787] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.463966] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782714, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.492485] env[68233]: DEBUG nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Successfully updated port: 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 966.510017] env[68233]: DEBUG nova.compute.manager [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 966.510277] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 966.510551] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b551a159-fbb5-4aee-88dc-6a89386b2e82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.519039] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 966.519039] env[68233]: value = "task-2782715" [ 966.519039] env[68233]: _type = "Task" [ 966.519039] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.527975] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782715, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.537829] env[68233]: DEBUG nova.compute.manager [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Received event network-vif-plugged-bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 966.538086] env[68233]: DEBUG oslo_concurrency.lockutils [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] Acquiring lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.538606] env[68233]: DEBUG oslo_concurrency.lockutils [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.538606] env[68233]: DEBUG oslo_concurrency.lockutils [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.538756] env[68233]: DEBUG nova.compute.manager [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] No waiting events found dispatching network-vif-plugged-bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 966.538868] env[68233]: WARNING nova.compute.manager [req-583cd141-8336-4db5-b174-5e6cff6c64c2 req-6334a40f-0ba6-4623-ad13-81204d94aaac service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Received unexpected event network-vif-plugged-bf0551bd-3228-40bf-84cb-a459a20639b8 for instance with vm_state building and task_state spawning. [ 966.587112] env[68233]: DEBUG nova.scheduler.client.report [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 966.803976] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.873953] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782712, 'name': CreateVM_Task, 'duration_secs': 0.541367} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.874200] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 966.874971] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559441', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'name': 'volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '287df4d5-4e98-464d-8f0a-4571c1e4df4f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'serial': '6b0ac6e3-b417-4371-87f2-0141bc97e81c'}, 'boot_index': 0, 'attachment_id': '6584f577-cef8-427a-89e5-beb58230ca10', 'mount_device': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68233) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 966.875290] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Root volume attach. 
Driver type: vmdk {{(pid=68233) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 966.876456] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9e3613-a3af-4f2b-b0cf-36239d58f324 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.884594] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c449fa-ed14-48a3-b330-f84a62e22332 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.892241] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3feb7131-53af-4d63-a4c5-8c3b4532edf5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.899237] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-249def6b-c3f0-4003-8a79-4f473b289bfc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.909636] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 966.909636] env[68233]: value = "task-2782716" [ 966.909636] env[68233]: _type = "Task" [ 966.909636] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.918362] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782716, 'name': RelocateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.963962] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782714, 'name': ReconfigVM_Task, 'duration_secs': 0.403902} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.964365] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 966.964905] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8679ee7e-42f8-4c9b-a6d4-5bad0fd49e34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.971467] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 966.971467] env[68233]: value = "task-2782717" [ 966.971467] env[68233]: _type = "Task" [ 966.971467] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.979628] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782717, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.998811] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.999009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.999200] env[68233]: DEBUG nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.028669] env[68233]: DEBUG nova.network.neutron [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Updating instance_info_cache with network_info: [{"id": "bf0551bd-3228-40bf-84cb-a459a20639b8", "address": "fa:16:3e:36:76:65", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, 
"meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0551bd-32", "ovs_interfaceid": "bf0551bd-3228-40bf-84cb-a459a20639b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.037971] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782715, 'name': PowerOffVM_Task, 'duration_secs': 0.308253} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.038451] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 967.040202] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 967.040202] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559447', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'name': 'volume-b040816d-86e9-41f4-80a3-eb4938cd8774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7831d420-5a0a-4901-b7fe-95307b4b61f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'serial': 'b040816d-86e9-41f4-80a3-eb4938cd8774'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 967.041668] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bdc121-8323-4f2a-85df-e0f904a24034 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.070938] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c4f8005-8d3a-4716-938d-0eef4a244228 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.078737] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb94c0dc-7cd1-4c3d-b829-019dd32c200f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.100499] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.103389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.879s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.103657] env[68233]: DEBUG nova.objects.instance [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'resources' on Instance uuid 0b1065c2-7923-4dc4-a64f-be72a7994472 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.105327] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec19b75a-216e-44ef-bd92-03a1d355440d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.124264] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-b040816d-86e9-41f4-80a3-eb4938cd8774/volume-b040816d-86e9-41f4-80a3-eb4938cd8774.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 967.129918] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfiguring VM instance instance-00000053 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 967.131209] env[68233]: INFO nova.scheduler.client.report [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Deleted allocations for instance c6a358b7-0e6a-43bb-a171-5e6175f947bd [ 967.132721] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8922d42e-b344-4095-a1ba-85e2f0422d4a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.153773] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 967.153773] env[68233]: value = "task-2782718" [ 967.153773] env[68233]: _type = "Task" [ 967.153773] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.166195] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782718, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.289949] env[68233]: DEBUG nova.network.neutron [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updated VIF entry in instance network info cache for port ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 967.290389] env[68233]: DEBUG nova.network.neutron [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [{"id": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "address": "fa:16:3e:82:fa:cc", "network": {"id": "e943db9f-aba9-4d7d-8556-edf5debb5109", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-672616402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a0b7eed382649819b33a9370cd51228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce58be4a-6f", "ovs_interfaceid": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.420304] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782716, 'name': RelocateVM_Task} progress is 20%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.484619] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782717, 'name': Rename_Task, 'duration_secs': 0.254359} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.484896] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 967.485159] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1af4133-de66-424d-a42a-279a5f2521c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.491820] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 967.491820] env[68233]: value = "task-2782719" [ 967.491820] env[68233]: _type = "Task" [ 967.491820] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.502844] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.532369] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.533209] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Instance network_info: |[{"id": "bf0551bd-3228-40bf-84cb-a459a20639b8", "address": "fa:16:3e:36:76:65", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0551bd-32", "ovs_interfaceid": "bf0551bd-3228-40bf-84cb-a459a20639b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 967.533574] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:76:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1eed7865-f9d8-463e-843f-3b0b3a962a2c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf0551bd-3228-40bf-84cb-a459a20639b8', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 967.552377] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 967.552377] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 967.552377] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6ff31541-e3b9-40df-9543-5155dba13bc2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.580855] env[68233]: WARNING nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. ignoring it [ 967.588763] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 967.588763] env[68233]: value = "task-2782720" [ 967.588763] env[68233]: _type = "Task" [ 967.588763] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.598732] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782720, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.654917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-659b0fcc-5c23-4945-9c78-210c3562da8b tempest-ServersAdminTestJSON-123061877 tempest-ServersAdminTestJSON-123061877-project-member] Lock "c6a358b7-0e6a-43bb-a171-5e6175f947bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.535s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.667798] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782718, 'name': ReconfigVM_Task, 'duration_secs': 0.396458} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.667798] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Reconfigured VM instance instance-00000053 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 967.674326] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2682c45a-d7f6-47e1-b109-99e639ae265e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.692148] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 967.692148] env[68233]: value = "task-2782721" [ 967.692148] env[68233]: _type = "Task" [ 967.692148] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.706733] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782721, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.794642] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa6611a6-e99b-4c37-85e7-5816c3690ad3 req-2c45a5cb-818b-46f3-8b23-5019ebccd2ad service nova] Releasing lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.924112] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782716, 'name': RelocateVM_Task, 'duration_secs': 0.525269} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.927180] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 967.927531] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559441', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'name': 'volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '287df4d5-4e98-464d-8f0a-4571c1e4df4f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'serial': '6b0ac6e3-b417-4371-87f2-0141bc97e81c'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 967.930964] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfcacd6-365c-4a27-8092-b3b48722d163 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.955829] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2336289-f31b-4faa-aaa1-2d86b0617297 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.959287] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dd0662-73fd-4cee-9dab-39fe8fd804a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.981877] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496fcb3e-fd5b-49bf-9b88-d729d4946ca9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
967.994187] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c/volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 967.994754] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e7ba950-7437-4b82-84ee-7360548c1cef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.053120] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3371f980-b1f0-47b5-afa2-09347f2fa9da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.060547] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782719, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.060979] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 968.060979] env[68233]: value = "task-2782722" [ 968.060979] env[68233]: _type = "Task" [ 968.060979] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.069010] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccf98fe-b5ba-4978-a055-3cc6a15c1ba1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.077593] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.087927] env[68233]: DEBUG nova.compute.provider_tree [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.104776] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782720, 'name': CreateVM_Task, 'duration_secs': 0.443893} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.105169] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 968.105968] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.106160] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.106545] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 968.106798] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-703c5bff-ceae-46e2-942b-2bd2eaec5e38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.111596] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 968.111596] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aefc81-254c-3555-f082-776df70d406d" [ 968.111596] env[68233]: _type = "Task" [ 968.111596] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.122813] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aefc81-254c-3555-f082-776df70d406d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.207597] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782721, 'name': ReconfigVM_Task, 'duration_secs': 0.246223} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.208245] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559447', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'name': 'volume-b040816d-86e9-41f4-80a3-eb4938cd8774', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7831d420-5a0a-4901-b7fe-95307b4b61f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'b040816d-86e9-41f4-80a3-eb4938cd8774', 'serial': 'b040816d-86e9-41f4-80a3-eb4938cd8774'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 968.208245] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.208940] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b097260-c9e1-4917-a88c-e94c7189d3de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.216474] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 968.217322] env[68233]: DEBUG nova.network.neutron [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60ae55cd-a0e9-4d27-b45d-0fb840eb0b36", 
"address": "fa:16:3e:c8:cb:4e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60ae55cd-a0", "ovs_interfaceid": "60ae55cd-a0e9-4d27-b45d-0fb840eb0b36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.218495] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26302877-9705-432c-af45-af6c6c73c64b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.295021] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 968.295021] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 968.295021] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] 7831d420-5a0a-4901-b7fe-95307b4b61f0 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 968.295021] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-714e0e8f-be6b-4ca7-b0b5-e5406caabd6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.301056] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 968.301056] env[68233]: value = "task-2782724" [ 968.301056] env[68233]: _type = "Task" [ 968.301056] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.308903] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782724, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.509018] env[68233]: DEBUG oslo_vmware.api [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782719, 'name': PowerOnVM_Task, 'duration_secs': 0.578317} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.509018] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 968.509018] env[68233]: INFO nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Took 8.30 seconds to spawn the instance on the hypervisor. [ 968.509018] env[68233]: DEBUG nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 968.509018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a933134-dbaf-43d4-988f-40f29810b41c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.526294] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.526294] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing instance network info cache due to event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 968.526294] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.526671] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.526979] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.575144] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782722, 'name': ReconfigVM_Task, 'duration_secs': 0.434049} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.575714] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Reconfigured VM instance instance-00000056 to attach disk [datastore2] volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c/volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 968.581850] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d87e63c0-f3d0-4506-addf-0f8660ec4f3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.600130] env[68233]: DEBUG nova.scheduler.client.report [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 968.603556] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 968.603556] env[68233]: value = "task-2782725" [ 968.603556] env[68233]: _type = "Task" [ 968.603556] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.615822] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782725, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.629217] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aefc81-254c-3555-f082-776df70d406d, 'name': SearchDatastore_Task, 'duration_secs': 0.011951} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.629217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.629217] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 968.629217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.629217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.629616] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 968.631296] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Received event network-changed-bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 968.631665] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Refreshing instance network info cache due to event network-changed-bf0551bd-3228-40bf-84cb-a459a20639b8. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 968.632140] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquiring lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.633708] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquired lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.633708] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Refreshing network info cache for port bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.634164] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-920e7db4-1904-46ee-a427-c09020325717 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.644613] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 968.644807] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 968.645613] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64b1302f-6f34-45bc-a448-e7c3e7161084 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.651331] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 968.651331] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523be97b-ac15-1445-d11f-f91e127eec03" [ 968.651331] env[68233]: _type = "Task" [ 968.651331] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.659534] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523be97b-ac15-1445-d11f-f91e127eec03, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.724027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.724027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.724027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.724027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f67e449-ce8f-4d83-8e68-c61ca89733a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.741596] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 968.741748] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 968.741994] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 968.742249] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 968.742442] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 968.742674] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 968.742958] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 968.743210] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 968.743458] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 968.743699] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 968.743930] env[68233]: DEBUG nova.virt.hardware [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 968.750307] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfiguring VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 968.750677] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9597f22c-2afa-4da5-8fa1-460fce51bc7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.767819] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 968.767819] env[68233]: value = "task-2782726" [ 968.767819] env[68233]: _type = "Task" [ 968.767819] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.776895] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782726, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.810824] env[68233]: DEBUG oslo_vmware.api [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782724, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291396} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.811110] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 968.811303] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 968.811513] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 968.811753] env[68233]: INFO nova.compute.manager [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Took 2.30 seconds to destroy the instance on the hypervisor. [ 968.811990] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.812223] env[68233]: DEBUG nova.compute.manager [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 968.812316] env[68233]: DEBUG nova.network.neutron [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.031422] env[68233]: INFO nova.compute.manager [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Took 25.46 seconds to build instance. 
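Every vCenter operation in this stretch of the log (PowerOffVM_Task, the ReconfigVM_Task calls that detach disk 2001 and attach the new volume, UnregisterVM, DeleteDatastoreFile_Task, CreateVM_Task, CopyVirtualDisk_Task) follows the same shape: the driver submits a task, then wait_for_task polls it until _poll_task reports it "completed successfully", emitting the "progress is N%" lines in between. The snippet below is only a schematic of that poll loop in plain Python, not the oslo.vmware implementation; get_task_info and the 0.5 s poll interval are illustrative assumptions.

import time

def wait_for_vc_task(get_task_info, task_id, poll_interval=0.5):
    # Schematic poll loop mirroring the "Waiting for the task ... progress is N%"
    # entries above. get_task_info(task_id) is a hypothetical callable returning a
    # dict such as {'state': 'running', 'progress': 14} for the named task.
    while True:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
        # Still queued/running: report progress and poll again.
        print("Task: {'id': %s} progress is %s%%." % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)

In the log, the completion entries then report the measured duration_secs for each task, e.g. 0.308 s for the PowerOffVM_Task and 0.291 s for the DeleteDatastoreFile_Task.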
[ 969.109718] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.113051] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.283s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.113051] env[68233]: DEBUG nova.objects.instance [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lazy-loading 'resources' on Instance uuid 990e1a66-f2ab-4925-b1da-58cdc41a6315 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 969.121007] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782725, 'name': ReconfigVM_Task, 'duration_secs': 0.243901} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.121588] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559441', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'name': 'volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '287df4d5-4e98-464d-8f0a-4571c1e4df4f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'serial': '6b0ac6e3-b417-4371-87f2-0141bc97e81c'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 969.122219] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d030ac2e-09fa-4a03-88ad-941301f3b265 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.131279] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 969.131279] env[68233]: value = "task-2782727" [ 969.131279] env[68233]: _type = "Task" [ 969.131279] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.138475] env[68233]: INFO nova.scheduler.client.report [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 0b1065c2-7923-4dc4-a64f-be72a7994472 [ 969.147998] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782727, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.162154] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523be97b-ac15-1445-d11f-f91e127eec03, 'name': SearchDatastore_Task, 'duration_secs': 0.009896} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.163146] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08bbcd96-7dc0-40c9-8ee6-774f6416bdb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.170842] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 969.170842] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520e81d7-4b2a-bc55-bec1-3170fc756401" [ 969.170842] env[68233]: _type = "Task" [ 969.170842] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.187946] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520e81d7-4b2a-bc55-bec1-3170fc756401, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.188282] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.188598] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] edf4bfac-175b-40b7-bf08-298c4735bfae/edf4bfac-175b-40b7-bf08-298c4735bfae.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 969.188906] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83f2dee5-5df0-4e66-b449-753c93f1614b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.198690] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 969.198690] env[68233]: value = "task-2782728" [ 969.198690] env[68233]: _type = "Task" [ 969.198690] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.212110] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782728, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.279361] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.388596] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Updated VIF entry in instance network info cache for port bf0551bd-3228-40bf-84cb-a459a20639b8. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.388654] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Updating instance_info_cache with network_info: [{"id": "bf0551bd-3228-40bf-84cb-a459a20639b8", "address": "fa:16:3e:36:76:65", "network": {"id": "ecbf7146-c846-4d97-8003-be18a959e40c", "bridge": "br-int", "label": "tempest-ImagesTestJSON-859499172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "853a057cfba3400ba05c89cb1d292f61", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1eed7865-f9d8-463e-843f-3b0b3a962a2c", "external-id": "nsx-vlan-transportzone-852", "segmentation_id": 852, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf0551bd-32", "ovs_interfaceid": "bf0551bd-3228-40bf-84cb-a459a20639b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.508905] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updated VIF entry in instance network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.509535] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.534237] env[68233]: DEBUG oslo_concurrency.lockutils [None req-14e7c5fb-8e96-487f-97fb-7c3d5394baee tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.972s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.549058] env[68233]: DEBUG nova.network.neutron [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.596249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.596566] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.642204] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': 
task-2782727, 'name': Rename_Task, 'duration_secs': 0.142694} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.645178] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 969.646603] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef4bfe33-e2eb-4bbe-af54-f1c4021b5d97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.651471] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b925e367-9727-4c17-9cbe-7eb2f6184883 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "0b1065c2-7923-4dc4-a64f-be72a7994472" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.896s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.653691] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 969.653691] env[68233]: value = "task-2782729" [ 969.653691] env[68233]: _type = "Task" [ 969.653691] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.662465] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.714803] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782728, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.779144] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782726, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.892066] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Releasing lock "refresh_cache-edf4bfac-175b-40b7-bf08-298c4735bfae" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.892343] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 969.892556] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing instance network info cache due to event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 969.892741] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.892884] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.893062] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.907139] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3a11a7-4155-449e-9052-c84e1443c991 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.916201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e0b48c5-c9ca-4c57-8ad3-3de68c8d1e87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.946973] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac73d73-c5a5-4326-aecb-9389fc6f3712 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.955184] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194e33a1-22d0-44c1-9834-dfe7dba7eb44 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.968895] env[68233]: DEBUG nova.compute.provider_tree [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Inventory has not changed in ProviderTree for provider: 
51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.013271] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.013543] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-vif-plugged-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 970.013736] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.013973] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.014162] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.014334] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] No waiting events found dispatching network-vif-plugged-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 970.014505] env[68233]: WARNING nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received unexpected event network-vif-plugged-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 for instance with vm_state active and task_state None. [ 970.014667] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-changed-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 970.014822] env[68233]: DEBUG nova.compute.manager [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing instance network info cache due to event network-changed-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 970.015016] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.015171] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.015328] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Refreshing network info cache for port 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.052295] env[68233]: INFO nova.compute.manager [-] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Took 1.24 seconds to deallocate network for instance. [ 970.100692] env[68233]: DEBUG nova.compute.utils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 970.164597] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782729, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.212900] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782728, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.281848] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782726, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.472138] env[68233]: DEBUG nova.scheduler.client.report [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 970.604460] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.610848] env[68233]: INFO nova.compute.manager [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Took 0.56 seconds to detach 1 volumes for instance. [ 970.667211] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782729, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.695645] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updated VIF entry in instance network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.695645] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.711040] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782728, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.782999] env[68233]: DEBUG oslo_vmware.api [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782726, 'name': ReconfigVM_Task, 'duration_secs': 1.802954} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.783922] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updated VIF entry in instance network info cache for port 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.784517] env[68233]: DEBUG nova.network.neutron [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "60ae55cd-a0e9-4d27-b45d-0fb840eb0b36", "address": "fa:16:3e:c8:cb:4e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60ae55cd-a0", "ovs_interfaceid": "60ae55cd-a0e9-4d27-b45d-0fb840eb0b36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.786100] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.786398] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfigured VM to attach interface {{(pid=68233) 
attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 970.977572] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.980107] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 15.155s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.000908] env[68233]: INFO nova.scheduler.client.report [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Deleted allocations for instance 990e1a66-f2ab-4925-b1da-58cdc41a6315 [ 971.118416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.170239] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782729, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.198470] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.199234] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 971.201484] env[68233]: DEBUG nova.compute.manager [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing instance network info cache due to event network-changed-9041c031-c9af-4931-8450-0b57b0e71c17. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 971.201484] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquiring lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.201484] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Acquired lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 971.201484] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Refreshing network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 971.218577] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782728, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.650812} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.219770] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] edf4bfac-175b-40b7-bf08-298c4735bfae/edf4bfac-175b-40b7-bf08-298c4735bfae.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 971.220180] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 971.222193] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f78ea173-2f81-402e-9bcb-70b7c524e93b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.232522] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 971.232522] env[68233]: value = "task-2782730" [ 971.232522] env[68233]: _type = "Task" [ 971.232522] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.248021] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782730, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.290502] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d450444-0856-45cb-877a-3483b038936e req-8984278c-65d3-416d-88cb-5b7e039bc935 service nova] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.293137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1bedcd62-d44c-47a4-b138-5a9d7a042692 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.222s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.509536] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c25e9b72-a253-4a81-84b0-6048d97cd413 tempest-ServersTestFqdnHostnames-1308639321 tempest-ServersTestFqdnHostnames-1308639321-project-member] Lock "990e1a66-f2ab-4925-b1da-58cdc41a6315" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.462s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.667336] env[68233]: DEBUG oslo_vmware.api [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782729, 'name': PowerOnVM_Task, 'duration_secs': 1.615832} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.667642] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.667867] env[68233]: INFO nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Took 7.13 seconds to spawn the instance on the hypervisor. 
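The records in this capture follow two fixed oslo formatting patterns: oslo_concurrency.lockutils emits paired 'Lock "..." acquired by "..." :: waited N.NNNs' and 'Lock "..." "released" by "..." :: held N.NNNs' lines, and oslo_vmware's _poll_task emits task progress lines that end in a final "'duration_secs': N.NNN} completed successfully" record. A minimal, hypothetical helper for summarizing those timings from a log like this one is sketched below; it is not part of Nova or oslo.vmware, and the script, its regexes, and its output format are assumptions based only on the line layout visible in this capture.

# Hypothetical log-analysis sketch (not Nova code): summarize lock hold times
# and completed oslo.vmware task durations from nova-compute log text.
# Assumes the oslo.log formatting seen above; records that are split across
# physical lines in a capture like this one are simply skipped.
import re
import sys
from collections import defaultdict

HELD_RE = re.compile(
    r'Lock "(?P<name>[^"]+)" "released" by "(?P<caller>[^"]+)" :: held (?P<secs>[\d.]+)s')
TASK_RE = re.compile(
    r"Task: \{'id': (?P<task>\S+), 'name': (?P<name>\w+)"
    r"(?:, 'duration_secs': (?P<secs>[\d.]+))?\} completed successfully")

def summarize(lines):
    lock_held = defaultdict(list)   # lock name -> list of hold times in seconds
    task_durations = {}             # task id -> (task name, duration_secs)
    for line in lines:
        # finditer, because several records can share one physical line here
        for m in HELD_RE.finditer(line):
            lock_held[m.group("name")].append(float(m.group("secs")))
        for m in TASK_RE.finditer(line):
            if m.group("secs"):
                task_durations[m.group("task")] = (m.group("name"),
                                                   float(m.group("secs")))
    return lock_held, task_durations

if __name__ == "__main__":
    held, tasks = summarize(sys.stdin)
    for name, times in sorted(held.items()):
        print(f"{name}: held {len(times)} time(s), max {max(times):.3f}s")
    for task_id, (name, secs) in sorted(tasks.items()):
        print(f"{task_id} ({name}): {secs:.3f}s")

Fed the lines of this capture, such a helper would report, for example, the 2.006s and 1.865s holds of the "compute_resources" lock and the 1.650812s CopyVirtualDisk_Task (task-2782728) and 1.615832s PowerOnVM_Task (task-2782729) completions recorded above.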
[ 971.668062] env[68233]: DEBUG nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.668850] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f0421b-aafb-4e9d-9b3e-038f0513849b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.711386] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.711716] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 971.712223] env[68233]: INFO nova.compute.manager [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Attaching volume 303bdc2c-2328-431b-b19a-48cd4fd023fd to /dev/sdb [ 971.745254] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782730, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181194} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.745254] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.745421] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e03b7e90-ed73-4c30-9844-7d314f99cdd5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.772272] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] edf4bfac-175b-40b7-bf08-298c4735bfae/edf4bfac-175b-40b7-bf08-298c4735bfae.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.773706] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f249a74f-4b0f-49d2-9aba-8057d2fa8a82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.789179] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05b62f5-641b-4b8b-8a0a-3b6b826d7ff3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.800765] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66e4d7f4-ed5e-4fd8-ab5f-7ad4fdac3d76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.803456] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 971.803456] env[68233]: value = "task-2782731" [ 971.803456] env[68233]: _type = "Task" [ 971.803456] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.812915] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782731, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.817120] env[68233]: DEBUG nova.virt.block_device [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updating existing volume attachment record: d9a8a4e8-5db8-4eb5-95ec-ef08ca0c6eeb {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 971.967059] env[68233]: DEBUG nova.compute.manager [req-df42a0ed-4a6a-4875-82ed-fcf6bfd719d9 req-e7e170d0-da62-4a1d-b384-001633627ebd service nova] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Received event network-vif-deleted-aa886750-c433-4287-826b-2f74ab52f0d0 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 971.977621] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updated VIF entry in instance network info cache for port 9041c031-c9af-4931-8450-0b57b0e71c17. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 971.978070] env[68233]: DEBUG nova.network.neutron [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [{"id": "9041c031-c9af-4931-8450-0b57b0e71c17", "address": "fa:16:3e:bd:0d:3d", "network": {"id": "776cc66b-482e-4bd5-9d6f-271e5ae6e382", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-327461711-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "963898fb1cae4e6e9438ace9dd437f9e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0dd3c126-9d86-4f9a-b81c-e9627c7a5401", "external-id": "nsx-vlan-transportzone-24", "segmentation_id": 24, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9041c031-c9", "ovs_interfaceid": "9041c031-c9af-4931-8450-0b57b0e71c17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 87385201-3118-4a8e-9739-db3b431566c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2812bf7c-5117-4fd9-9330-0cc94277bf5d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 13972b73-8bae-4a2a-a987-b6177381e7c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 48270554-abe4-4f72-b8b9-5f2de6a9ed26 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 972.032240] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance d0d6eed0-db5b-4371-8f03-b3415fd833f0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4677d047-f8dc-4501-be9b-14e6a2222f46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dca145c8-ed95-4dfb-9534-37035c75dafb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3f79709a-b8b7-4838-8731-d051155ff4f3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 73ca71c0-34cd-4393-82ff-4b297d350209 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 827711ac-ef52-41a0-9029-0a1805522a08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.032240] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 7831d420-5a0a-4901-b7fe-95307b4b61f0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9c0e581d-5856-470f-a737-301649d701e5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance edf4bfac-175b-40b7-bf08-298c4735bfae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 972.033815] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 972.187227] env[68233]: INFO nova.compute.manager [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Took 27.60 seconds to build instance. [ 972.321088] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782731, 'name': ReconfigVM_Task, 'duration_secs': 0.498496} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.321143] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Reconfigured VM instance instance-00000057 to attach disk [datastore2] edf4bfac-175b-40b7-bf08-298c4735bfae/edf4bfac-175b-40b7-bf08-298c4735bfae.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.321834] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d934f70d-57f9-433f-a649-0d3faa0b5b35 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.328661] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 972.328661] env[68233]: value = "task-2782735" [ 972.328661] env[68233]: _type = "Task" [ 972.328661] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.337610] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782735, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.361708] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f405a850-ae5e-47a7-ac48-b4abdcc51569 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.369402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3922c5-2783-4350-9941-8910404ba968 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.405579] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a3e788-25ed-4b5a-bfd7-f78d8ddd3b5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.414044] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00fdb73-c637-4488-8376-9bbab5f35987 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.428635] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.481185] env[68233]: DEBUG oslo_concurrency.lockutils [req-21a06965-b3c4-4851-bba1-f03c39c18e9c req-db8ac303-a480-4275-860f-77c70222457e service nova] Releasing lock "refresh_cache-13972b73-8bae-4a2a-a987-b6177381e7c8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 972.520084] env[68233]: DEBUG oslo_concurrency.lockutils [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.520404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.520640] env[68233]: DEBUG nova.compute.manager [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.521643] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e957bd3e-7bf4-4475-8686-6b46f84b37fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.530110] env[68233]: DEBUG nova.compute.manager [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 
9c0e581d-5856-470f-a737-301649d701e5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 972.530779] env[68233]: DEBUG nova.objects.instance [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'flavor' on Instance uuid 9c0e581d-5856-470f-a737-301649d701e5 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.695449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a32b245e-cc06-4ede-baea-d447e2b3150c tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.131s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.716749] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3f79709a-b8b7-4838-8731-d051155ff4f3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.717042] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.717257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.717440] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.717608] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.719868] env[68233]: INFO nova.compute.manager [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] 
Terminating instance [ 972.838966] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782735, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.932510] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 973.197621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.198317] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.198504] env[68233]: INFO nova.compute.manager [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Shelving [ 973.223413] env[68233]: DEBUG nova.compute.manager [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 973.223802] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 973.224831] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592c70a3-3892-4af1-a523-5b18c4e2e8dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.233381] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.233612] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8032eb64-4138-4b0b-a104-b561dcf67b2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.240364] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 973.240364] env[68233]: value = "task-2782736" [ 973.240364] env[68233]: _type = "Task" [ 973.240364] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.250718] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782736, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.340604] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782735, 'name': Rename_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.443195] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 973.443432] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.463s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.443784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.216s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.444025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.446122] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.359s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.446333] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.448352] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.330s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.448524] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.456633] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 
tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.456902] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.479660] env[68233]: INFO nova.scheduler.client.report [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Deleted allocations for instance 48270554-abe4-4f72-b8b9-5f2de6a9ed26 [ 973.487044] env[68233]: INFO nova.scheduler.client.report [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance 7831d420-5a0a-4901-b7fe-95307b4b61f0 [ 973.501407] env[68233]: INFO nova.scheduler.client.report [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Deleted allocations for instance d0d6eed0-db5b-4371-8f03-b3415fd833f0 [ 973.538121] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 973.538121] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2342b7ee-66a7-47e3-9c1b-f955923c9032 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.547093] env[68233]: DEBUG oslo_vmware.api [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 973.547093] env[68233]: value = "task-2782737" [ 973.547093] env[68233]: _type = "Task" [ 973.547093] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.558464] env[68233]: DEBUG oslo_vmware.api [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.751465] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782736, 'name': PowerOffVM_Task, 'duration_secs': 0.405514} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.751895] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 973.752108] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 973.752428] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84362fea-6e89-4d7b-af55-5465b838d7b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.824022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 973.824233] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 973.824430] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore1] 3f79709a-b8b7-4838-8731-d051155ff4f3 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 973.824717] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17fa5fc2-3045-412c-ad32-d16ad84a001e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.839587] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 973.839587] env[68233]: value = "task-2782739" [ 973.839587] env[68233]: _type = "Task" [ 973.839587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.849011] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782735, 'name': Rename_Task, 'duration_secs': 1.292816} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.849794] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 973.850118] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcb858fa-0407-464a-92c7-feefd42bae40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.854927] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782739, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.860485] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 973.860485] env[68233]: value = "task-2782740" [ 973.860485] env[68233]: _type = "Task" [ 973.860485] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.872154] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782740, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.963108] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 973.963369] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 973.964338] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cda3c49-a783-4f9f-a368-3595c0181e5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.988911] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808ac71f-5664-4689-97e9-e2ca5a2dd157 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.996938] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f394d4d9-b931-4e4d-a705-f681d96a978d tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "7831d420-5a0a-4901-b7fe-95307b4b61f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.994s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.998037] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dd8bd332-6d74-4968-974d-75a7c318d654 tempest-ServersWithSpecificFlavorTestJSON-973032085 tempest-ServersWithSpecificFlavorTestJSON-973032085-project-member] Lock "48270554-abe4-4f72-b8b9-5f2de6a9ed26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.631s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.021889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-47cadf67-a99b-4606-99b1-68335f205bac tempest-ServersV294TestFqdnHostnames-298181920 tempest-ServersV294TestFqdnHostnames-298181920-project-member] Lock "d0d6eed0-db5b-4371-8f03-b3415fd833f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.864s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.029882] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfiguring VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 974.032035] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3b1fe96-dcb2-4806-a416-c0d19f9d1ffc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.053501] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 974.053501] env[68233]: value = "task-2782741" [ 974.053501] env[68233]: _type = "Task" [ 974.053501] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.060900] env[68233]: DEBUG oslo_vmware.api [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782737, 'name': PowerOffVM_Task, 'duration_secs': 0.325235} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.062036] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.062036] env[68233]: DEBUG nova.compute.manager [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.063668] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a978b0-724a-4fdb-bdbf-3717b41ab35b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.070523] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.116114] env[68233]: DEBUG nova.compute.manager [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 974.116318] env[68233]: DEBUG nova.compute.manager [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing instance network info cache due to event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 974.116547] env[68233]: DEBUG oslo_concurrency.lockutils [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] Acquiring lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.116697] env[68233]: DEBUG oslo_concurrency.lockutils [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] Acquired lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 974.116878] env[68233]: DEBUG nova.network.neutron [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing network info cache for port ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 974.211107] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 974.212479] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-056b4be1-55d9-404c-8f57-16390a3b59dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.220168] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 974.220168] env[68233]: value = "task-2782743" [ 974.220168] env[68233]: _type = "Task" [ 974.220168] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.232826] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782743, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.352938] env[68233]: DEBUG oslo_vmware.api [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782739, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199816} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.353299] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 974.353474] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 974.353674] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 974.353864] env[68233]: INFO nova.compute.manager [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 974.354162] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 974.354389] env[68233]: DEBUG nova.compute.manager [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 974.354495] env[68233]: DEBUG nova.network.neutron [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 974.373216] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782740, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.563776] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.579652] env[68233]: DEBUG oslo_concurrency.lockutils [None req-37d3063a-bdfc-4aa7-8e8e-05a68aab4754 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.738043] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782743, 'name': PowerOffVM_Task, 'duration_secs': 0.180893} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.738043] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 974.738043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f7127b-d90f-4a23-b34a-ef21ef543aaf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.759716] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1974a12a-6633-483e-b4cb-ea3975fe4023 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.871784] env[68233]: DEBUG oslo_vmware.api [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782740, 'name': PowerOnVM_Task, 'duration_secs': 0.628532} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.872077] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.872289] env[68233]: INFO nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Took 9.66 seconds to spawn the instance on the hypervisor. 
[ 974.872503] env[68233]: DEBUG nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.873308] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba96e706-9555-4a1e-b60c-b565654be327 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.064700] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.272180] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 975.276109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a8750a52-a6f6-409d-9f31-27ddd255b358 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.292210] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 975.292210] env[68233]: value = "task-2782744" [ 975.292210] env[68233]: _type = "Task" [ 975.292210] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.301624] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782744, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.399709] env[68233]: INFO nova.compute.manager [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Took 28.66 seconds to build instance. [ 975.567907] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.572350] env[68233]: DEBUG nova.network.neutron [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updated VIF entry in instance network info cache for port ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 975.573081] env[68233]: DEBUG nova.network.neutron [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [{"id": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "address": "fa:16:3e:82:fa:cc", "network": {"id": "e943db9f-aba9-4d7d-8556-edf5debb5109", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-672616402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a0b7eed382649819b33a9370cd51228", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5116f690-f825-4fee-8a47-42b073e716c5", "external-id": "nsx-vlan-transportzone-692", "segmentation_id": 692, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapce58be4a-6f", "ovs_interfaceid": "ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.706842] env[68233]: DEBUG nova.network.neutron [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.806593] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782744, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.905250] env[68233]: INFO nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Rebuilding instance [ 975.905842] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0d6b9b26-46aa-439d-b2b3-0017ce064871 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.171s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.972185] env[68233]: DEBUG nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 975.973786] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7511b1cf-40b9-4d82-b96f-b4f466d3b4d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.066183] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.075492] env[68233]: DEBUG oslo_concurrency.lockutils [req-7fdc4936-e587-40cf-876d-94e92d4c955c req-1e5c11a5-ab88-451b-ba3b-f61cde580714 service nova] Releasing lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.213519] env[68233]: INFO nova.compute.manager [-] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Took 1.86 seconds to deallocate network for instance. [ 976.306579] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782744, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.372889] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 976.373209] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559456', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'name': 'volume-303bdc2c-2328-431b-b19a-48cd4fd023fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '827711ac-ef52-41a0-9029-0a1805522a08', 'attached_at': '', 'detached_at': '', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'serial': '303bdc2c-2328-431b-b19a-48cd4fd023fd'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 976.374891] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4049442b-6146-4bbd-82ac-acafbca98ffd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.394949] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2584880d-6b89-4605-bb97-0a4d45c08ac6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.423334] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] volume-303bdc2c-2328-431b-b19a-48cd4fd023fd/volume-303bdc2c-2328-431b-b19a-48cd4fd023fd.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 976.424793] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b85df3ef-1d40-4d99-a759-d8df4fb1c4cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.441327] env[68233]: DEBUG nova.compute.manager [req-107b25f3-b434-46dd-860f-c60b5ef488ad req-38e634ec-533a-45cb-8b5d-209f37e3cee7 service nova] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Received event network-vif-deleted-008c3f5c-f83a-4833-99e9-7aa70aff0c0b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 976.448177] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 976.448177] env[68233]: value = "task-2782745" [ 976.448177] env[68233]: _type = "Task" [ 976.448177] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.456547] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782745, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.570325] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.723312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.723312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.723312] env[68233]: DEBUG nova.objects.instance [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'resources' on Instance uuid 3f79709a-b8b7-4838-8731-d051155ff4f3 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.807391] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782744, 'name': CreateSnapshot_Task, 'duration_secs': 1.022476} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.808014] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 976.809095] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369223fb-be11-46b7-9997-dcabaa473ab1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.960400] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782745, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.991047] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 976.991047] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-59549ccb-e13d-4849-934b-20db88f43b7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.998732] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 976.998732] env[68233]: value = "task-2782746" [ 976.998732] env[68233]: _type = "Task" [ 976.998732] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.006590] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.067764] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.332251] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 977.335628] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6d352d1d-2ac2-4532-8be7-5c07775965db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.346898] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 977.346898] env[68233]: value = "task-2782747" [ 977.346898] env[68233]: _type = "Task" [ 977.346898] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.356135] env[68233]: DEBUG nova.compute.manager [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 977.356229] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b484454-6f45-4bae-936a-fdaee3bc9081 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.368424] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782747, 'name': CloneVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.460724] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782745, 'name': ReconfigVM_Task, 'duration_secs': 0.7476} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.461384] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfigured VM instance instance-00000050 to attach disk [datastore2] volume-303bdc2c-2328-431b-b19a-48cd4fd023fd/volume-303bdc2c-2328-431b-b19a-48cd4fd023fd.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 977.468584] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df1342e9-3252-4a65-b6eb-43b2281c8a06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.487528] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 977.487528] env[68233]: value = "task-2782748" [ 977.487528] env[68233]: _type = "Task" [ 977.487528] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.493665] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d6379d4-f1fe-4158-8f8e-2e8c7a8afb2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.507406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ebd0d5-d020-4533-aed4-2f34ca9020ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.510574] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782748, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.543249] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 977.543249] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 977.544079] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30490cc-d09d-4439-ae34-d56d53b261f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.547446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e3cd0a-5fcc-476b-8b22-d8f9b83fb8d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.559484] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7000ba6-9653-4fc3-bae6-bf3877db970b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.561029] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 977.561386] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48508800-9ca4-4576-bae8-aeb42e354aab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.575669] env[68233]: DEBUG nova.compute.provider_tree [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.583222] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.764963] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 977.765659] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 977.765984] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] 9c0e581d-5856-470f-a737-301649d701e5 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 977.766782] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11590815-8b8c-4295-8600-2a33bd9b26da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.778414] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 977.778414] env[68233]: value = "task-2782750" [ 977.778414] env[68233]: _type = "Task" [ 977.778414] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.787478] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782750, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.829145] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.829547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.857570] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782747, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.872892] env[68233]: INFO nova.compute.manager [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] instance snapshotting [ 977.875722] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe12ac3a-6b77-403c-aec8-6f3d8a0ca680 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.896349] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d14e4d-c226-4ec8-927b-8f699b13e384 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.999234] env[68233]: DEBUG oslo_vmware.api [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782748, 'name': ReconfigVM_Task, 'duration_secs': 0.18657} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.000059] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559456', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'name': 'volume-303bdc2c-2328-431b-b19a-48cd4fd023fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '827711ac-ef52-41a0-9029-0a1805522a08', 'attached_at': '', 'detached_at': '', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'serial': '303bdc2c-2328-431b-b19a-48cd4fd023fd'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 978.072345] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.084020] env[68233]: DEBUG nova.scheduler.client.report [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.290634] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187133} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.291152] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 978.291462] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 978.291785] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 978.334151] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 978.365437] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782747, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.407836] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 978.408229] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5cbc8a26-df15-41d3-ab9d-f9b6ee4c01e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.416923] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 978.416923] env[68233]: value = "task-2782751" [ 978.416923] env[68233]: _type = "Task" [ 978.416923] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.427325] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782751, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.575125] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.590455] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.866s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.624536] env[68233]: INFO nova.scheduler.client.report [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 3f79709a-b8b7-4838-8731-d051155ff4f3 [ 978.858325] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782747, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.859869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.860167] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.861668] env[68233]: INFO nova.compute.claims [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.930467] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782751, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.048785] env[68233]: DEBUG nova.objects.instance [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lazy-loading 'flavor' on Instance uuid 827711ac-ef52-41a0-9029-0a1805522a08 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.073214] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.135335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4341b194-75f2-4fd3-a3f0-e699bef34235 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "3f79709a-b8b7-4838-8731-d051155ff4f3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.418s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.336703] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=<?>,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:47:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 979.337050] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.337137] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 979.337331] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.337477] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 979.337623] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 979.337834] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 979.338024] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 979.338174] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 979.338355] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 979.338504] env[68233]: DEBUG nova.virt.hardware [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 979.339390] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891cbe2d-02ae-4c9e-b8ee-a60924fdaffd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.350084] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab21773-89c6-4b6c-84bb-58210f5b0777 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.369796] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:dd:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '33a2fb19-ac15-4669-ba90-af5e70070de2', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 979.378866] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 979.383059] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 979.383380] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782747, 'name': CloneVM_Task, 'duration_secs': 1.716787} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.384746] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-631e329c-d1d4-4f47-8ca6-d0c66926619f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.399931] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Created linked-clone VM from snapshot [ 979.401054] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517f215a-7177-435c-9572-f3133bec902c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.409518] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Uploading image d59a191b-5df7-4078-ba81-330dce0e225b {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 979.412700] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 979.412700] env[68233]: value = "task-2782752" [ 979.412700] env[68233]: _type = "Task" [ 979.412700] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.421727] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782752, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.437292] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782751, 'name': CreateSnapshot_Task, 'duration_secs': 0.886761} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.437292] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 979.437292] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c9f884b-74fc-4fba-85bf-d4d0dff83e5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.452221] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 979.452221] env[68233]: value = "vm-559458" [ 979.452221] env[68233]: _type = "VirtualMachine" [ 979.452221] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 979.452814] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d7c85990-0a42-4d99-8be8-3841c631ca4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.459920] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lease: (returnval){ [ 979.459920] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52049fd9-a5ee-6bc5-801c-ff47448d4c33" [ 979.459920] env[68233]: _type = "HttpNfcLease" [ 979.459920] env[68233]: } obtained for exporting VM: (result){ [ 979.459920] env[68233]: value = "vm-559458" [ 979.459920] env[68233]: _type = "VirtualMachine" [ 979.459920] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 979.460519] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the lease: (returnval){ [ 979.460519] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52049fd9-a5ee-6bc5-801c-ff47448d4c33" [ 979.460519] env[68233]: _type = "HttpNfcLease" [ 979.460519] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 979.468863] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.468863] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52049fd9-a5ee-6bc5-801c-ff47448d4c33" [ 979.468863] env[68233]: _type = "HttpNfcLease" [ 979.468863] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.556366] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0a3e4c38-d3a4-4d6e-b02f-6f41befaea60 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" "released" by "nova.compute.manager.ComputeManager.attach_volume.<locals>.do_attach_volume" :: held 7.845s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.574880] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.614559] env[68233]: DEBUG oslo_concurrency.lockutils [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.614829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.930589] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782752, 'name': CreateVM_Task, 'duration_secs': 0.388216} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.931040] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 979.933786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.933786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.933786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 979.933786] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e9ecb86-5c15-4ba6-9834-723ba00c58b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.940448] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 979.940448] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52df1ea4-d87c-438c-ca85-08e4053f44dc" [ 979.940448] env[68233]: _type = "Task" [ 979.940448] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.956377] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52df1ea4-d87c-438c-ca85-08e4053f44dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009782} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.964123] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 979.964640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 979.964735] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 979.968020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.968020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 979.968020] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 979.968302] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1792518d-a11f-4825-a537-822223013851 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.973906] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38ffdf8c-16e5-42be-b297-b67832e289dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.986248] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.986248] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52049fd9-a5ee-6bc5-801c-ff47448d4c33" [ 979.986248] env[68233]: _type = "HttpNfcLease" [ 979.986248] env[68233]: } is ready. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 979.988280] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 979.988280] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52049fd9-a5ee-6bc5-801c-ff47448d4c33" [ 979.988280] env[68233]: _type = "HttpNfcLease" [ 979.988280] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 979.989109] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 979.989109] env[68233]: value = "task-2782754" [ 979.989109] env[68233]: _type = "Task" [ 979.989109] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.990796] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d10ca9-0715-46db-8f46-a6bd2cf3a08d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.993464] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 979.993628] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 979.999630] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d1d6148-96a0-4d05-93e7-6e705bd4d1ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.007331] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 980.007506] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 980.013201] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782754, 'name': CloneVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.013516] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 980.013516] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bf701e-49b6-eb4b-90ce-717684246fe7" [ 980.013516] env[68233]: _type = "Task" [ 980.013516] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.086281] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bf701e-49b6-eb4b-90ce-717684246fe7, 'name': SearchDatastore_Task, 'duration_secs': 0.01031} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.090267] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.090688] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a4d4f34-f169-4833-b38b-ee40783548bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.100620] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 980.100620] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fea61e-3d08-4997-a384-590732c48dd3" [ 980.100620] env[68233]: _type = "Task" [ 980.100620] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.109280] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fea61e-3d08-4997-a384-590732c48dd3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.121319] env[68233]: INFO nova.compute.manager [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Detaching volume 303bdc2c-2328-431b-b19a-48cd4fd023fd [ 980.132625] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b4f07141-29a9-4543-b474-b17a12228cd6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.183589] env[68233]: INFO nova.virt.block_device [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Attempting to driver detach volume 303bdc2c-2328-431b-b19a-48cd4fd023fd from mountpoint /dev/sdb [ 980.183926] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 980.183999] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559456', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'name': 'volume-303bdc2c-2328-431b-b19a-48cd4fd023fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '827711ac-ef52-41a0-9029-0a1805522a08', 'attached_at': '', 'detached_at': '', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'serial': '303bdc2c-2328-431b-b19a-48cd4fd023fd'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 980.185144] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ef73f6-3b9a-4efc-83c6-95a1607b6e3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.218380] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98a43c9-8937-45e0-9629-484da19e1119 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.234609] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88544839-b6c7-407c-80eb-4449e369f50d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.255092] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf323c6f-e986-4b93-9b74-f4376484d5f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.271538] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 
tempest-VolumesAdminNegativeTest-1302465750-project-member] The volume has not been displaced from its original location: [datastore2] volume-303bdc2c-2328-431b-b19a-48cd4fd023fd/volume-303bdc2c-2328-431b-b19a-48cd4fd023fd.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 980.276229] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 980.279864] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5acc9f0d-cf59-4f23-90dc-aa15a65f9603 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.300377] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 980.300377] env[68233]: value = "task-2782755" [ 980.300377] env[68233]: _type = "Task" [ 980.300377] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.308599] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782755, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.340621] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea2ee8b-2c68-4ed0-95ba-279bfd8c4a8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.349468] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0ff723-cda0-4b90-8780-1b550d21ab47 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.386535] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fab785-5005-4bef-802a-01fc59b1c54c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.394962] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc906e05-0a46-4dc0-af3d-7cff79f29f92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.409944] env[68233]: DEBUG nova.compute.provider_tree [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.504392] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782754, 'name': 
CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.583495] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.614655] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fea61e-3d08-4997-a384-590732c48dd3, 'name': SearchDatastore_Task, 'duration_secs': 0.011757} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.614976] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 980.615415] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 980.615783] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20034992-18e6-4d79-a7ea-b86b039f5197 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.625149] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 980.625149] env[68233]: value = "task-2782756" [ 980.625149] env[68233]: _type = "Task" [ 980.625149] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.633710] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782756, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.815620] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782755, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.916119] env[68233]: DEBUG nova.scheduler.client.report [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.014210] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782754, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.026345] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.026761] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.087486] env[68233]: DEBUG oslo_vmware.api [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782741, 'name': ReconfigVM_Task, 'duration_secs': 6.836784} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.087671] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 981.088770] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Reconfigured VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 981.140548] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782756, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.316577] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782755, 'name': ReconfigVM_Task, 'duration_secs': 0.842404} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.316659] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 981.324888] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1116a94-479a-4c05-9d4b-dc4f0b2b99bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.347492] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 981.347492] env[68233]: value = "task-2782757" [ 981.347492] env[68233]: _type = "Task" [ 981.347492] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.363133] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782757, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.424242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.425285] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 981.479948] env[68233]: DEBUG nova.compute.manager [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-vif-deleted-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 981.480066] env[68233]: INFO nova.compute.manager [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Neutron deleted interface 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36; detaching it from the instance and deleting it from the info cache [ 981.480302] env[68233]: DEBUG nova.network.neutron [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.505359] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782754, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.534232] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 981.638805] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782756, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560083} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.640981] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 981.640981] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 981.640981] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-252edf50-5839-4b16-b253-536c937e83fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.648000] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 981.648000] env[68233]: value = "task-2782758" [ 981.648000] env[68233]: _type = "Task" [ 981.648000] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.657174] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782758, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.858183] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782757, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.893506] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.893840] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.932259] env[68233]: DEBUG nova.compute.utils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 981.934523] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 981.935428] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 981.983897] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.984118] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] Acquired lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 981.987229] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12cb504-c1fd-49b1-a487-445667cc19b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.007611] env[68233]: DEBUG oslo_concurrency.lockutils [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] Releasing lock "73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 982.007939] env[68233]: WARNING nova.compute.manager [req-5d6d8106-e6eb-4a7a-9fd8-8ef921a57f5f req-06dbdd32-9642-45f8-b91d-46765c0ec8d0 service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Detach interface failed, 
port_id=60ae55cd-a0e9-4d27-b45d-0fb840eb0b36, reason: No device with interface-id 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 exists on VM: nova.exception.NotFound: No device with interface-id 60ae55cd-a0e9-4d27-b45d-0fb840eb0b36 exists on VM [ 982.010228] env[68233]: DEBUG nova.policy [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 982.021714] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782754, 'name': CloneVM_Task, 'duration_secs': 1.840057} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.022102] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Created linked-clone VM from snapshot [ 982.023042] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46edf0f-b036-44b2-973f-5832fb024e9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.032040] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Uploading image 34482c35-8481-4a93-b21c-3805126567cf {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 982.046696] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 982.047107] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ad7ec904-2ef9-4c18-8c8b-c451e18c1c19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.054370] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 982.054370] env[68233]: value = "task-2782759" [ 982.054370] env[68233]: _type = "Task" [ 982.054370] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.063019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.063019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.063511] env[68233]: INFO nova.compute.claims [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 982.072640] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782759, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.161841] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069344} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.163614] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 982.164442] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea028779-0767-47b7-a792-c731fa9cf113 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.189789] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Reconfiguring VM instance instance-00000055 to attach disk [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 982.190290] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8cf32b4-8996-4f84-9383-149984606669 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.213732] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 982.213732] env[68233]: value = "task-2782760" [ 982.213732] env[68233]: _type = "Task" [ 982.213732] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.222642] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782760, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.366138] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782757, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.398412] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 982.435509] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 982.450791] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.451092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.451316] env[68233]: DEBUG nova.network.neutron [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 982.464439] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Successfully created port: 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.570163] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782759, 'name': Destroy_Task, 'duration_secs': 0.503895} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.570613] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Destroyed the VM [ 982.570854] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 982.571099] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d95ff428-8e63-4aab-8754-8964c835b33f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.577508] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 982.577508] env[68233]: value = "task-2782761" [ 982.577508] env[68233]: _type = "Task" [ 982.577508] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.588392] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782761, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.724731] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782760, 'name': ReconfigVM_Task, 'duration_secs': 0.434663} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.725197] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Reconfigured VM instance instance-00000055 to attach disk [datastore2] 9c0e581d-5856-470f-a737-301649d701e5/9c0e581d-5856-470f-a737-301649d701e5.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 982.726280] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7d900dd-b718-42a5-8ac1-0335b26da116 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.733359] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 982.733359] env[68233]: value = "task-2782762" [ 982.733359] env[68233]: _type = "Task" [ 982.733359] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.746963] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782762, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.861017] env[68233]: DEBUG oslo_vmware.api [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782757, 'name': ReconfigVM_Task, 'duration_secs': 1.161135} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.868040] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559456', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'name': 'volume-303bdc2c-2328-431b-b19a-48cd4fd023fd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '827711ac-ef52-41a0-9029-0a1805522a08', 'attached_at': '', 'detached_at': '', 'volume_id': '303bdc2c-2328-431b-b19a-48cd4fd023fd', 'serial': '303bdc2c-2328-431b-b19a-48cd4fd023fd'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 982.926927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.968591] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.969159] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.969529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.970433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 982.970433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.972398] env[68233]: INFO nova.compute.manager [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Terminating instance [ 983.087508] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782761, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.207062] env[68233]: DEBUG nova.network.neutron [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [{"id": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "address": "fa:16:3e:24:76:0e", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape331f25d-7a", "ovs_interfaceid": "e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.247591] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782762, 'name': Rename_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.320805] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92934cbd-29b3-4597-9ceb-8326e751a254 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.329366] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d59989-ea1d-44dd-804a-f558a35ccaf1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.369735] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9de266-0dcd-4fe8-8a6f-852c2a69dcd1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.378215] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8cfdee-d361-495b-837c-d54b5edc0e04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.126368] env[68233]: DEBUG nova.objects.instance [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lazy-loading 'flavor' on Instance uuid 827711ac-ef52-41a0-9029-0a1805522a08 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 984.131282] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 984.131282] env[68233]: DEBUG nova.compute.manager [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 984.131282] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 984.131544] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-73ca71c0-34cd-4393-82ff-4b297d350209" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.135996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be76af5-55d9-4665-9eb5-bf7e33d3b327 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.159686] env[68233]: DEBUG nova.compute.provider_tree [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.163171] env[68233]: DEBUG oslo_vmware.api [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782761, 'name': RemoveSnapshot_Task, 'duration_secs': 0.98455} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.169369] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 984.171708] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782762, 'name': Rename_Task, 'duration_secs': 1.012401} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.172746] env[68233]: DEBUG nova.compute.manager [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received event network-vif-plugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 984.172950] env[68233]: DEBUG oslo_concurrency.lockutils [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.173172] env[68233]: DEBUG oslo_concurrency.lockutils [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.173341] env[68233]: DEBUG oslo_concurrency.lockutils [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.173502] env[68233]: DEBUG nova.compute.manager [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] No waiting events found dispatching network-vif-plugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.173667] env[68233]: WARNING nova.compute.manager [req-f439e312-0a3f-4d1d-8c66-e0ed58c28a79 req-1eed5bda-09a9-458f-970f-468ed6df344f service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received unexpected event network-vif-plugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd for instance with vm_state building and task_state spawning. 
[ 984.174275] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 984.175132] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 984.176991] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-919781a8-c053-483b-a3b1-ad99664b19c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.177392] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ef3c409-5ee7-4403-8665-8656d98b568f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.183368] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 984.183560] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 984.183776] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 984.183978] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 984.184135] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 984.184298] 
env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 984.184505] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 984.184662] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 984.184822] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 984.184978] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 984.185160] env[68233]: DEBUG nova.virt.hardware [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 984.186527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bd150a-4a41-448a-8203-aa7bfb909d7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.191311] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 984.191311] env[68233]: value = "task-2782763" [ 984.191311] env[68233]: _type = "Task" [ 984.191311] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.191576] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 984.191576] env[68233]: value = "task-2782764" [ 984.191576] env[68233]: _type = "Task" [ 984.191576] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.202425] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c36d4b-e347-4564-bc63-8e1436837cdc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.213558] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782764, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.213891] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.386654] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Successfully updated port: 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.647467] env[68233]: DEBUG oslo_concurrency.lockutils [None req-23348aae-59e0-44a1-8387-9521ad5d90ad tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-73ca71c0-34cd-4393-82ff-4b297d350209-60ae55cd-a0e9-4d27-b45d-0fb840eb0b36" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.190s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.662843] env[68233]: DEBUG nova.scheduler.client.report [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.676408] env[68233]: WARNING nova.compute.manager [None req-14438101-5555-4dd0-b49a-56936fd9a78f tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Image not found during snapshot: nova.exception.ImageNotFound: Image 34482c35-8481-4a93-b21c-3805126567cf could not be found. [ 984.706067] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782763, 'name': PowerOffVM_Task, 'duration_secs': 0.235858} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.709376] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 984.709714] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 984.710364] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782764, 'name': PowerOnVM_Task, 'duration_secs': 0.512079} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.710643] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c45bc456-6ff4-4d82-a321-32f036ce6bd7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.712364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 984.712641] env[68233]: DEBUG nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 984.713461] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4c8bbf-7e0f-4573-8158-6fabdf18779b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.778542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 984.779367] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 984.779367] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleting the datastore file [datastore1] 73ca71c0-34cd-4393-82ff-4b297d350209 {{(pid=68233) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.779367] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c371efcb-743e-4258-ad27-0108d613de4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.785951] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 984.785951] env[68233]: value = "task-2782766" [ 984.785951] env[68233]: _type = "Task" [ 984.785951] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.794284] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.891027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.891234] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 984.891408] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 985.140373] env[68233]: DEBUG oslo_concurrency.lockutils [None req-757139bc-a220-4b48-94d8-fb9d7f00a82a tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 5.525s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.168828] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.169436] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 985.176023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.246s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.176023] env[68233]: INFO nova.compute.claims [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.177596] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "edf4bfac-175b-40b7-bf08-298c4735bfae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.177900] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.178149] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.179122] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.179122] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.180971] env[68233]: INFO nova.compute.manager [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Terminating instance [ 985.227809] env[68233]: INFO nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 
9c0e581d-5856-470f-a737-301649d701e5] bringing vm to original state: 'stopped' [ 985.297850] env[68233]: DEBUG oslo_vmware.api [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282545} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.298426] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 985.298426] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 985.298671] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 985.299217] env[68233]: INFO nova.compute.manager [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Took 1.17 seconds to destroy the instance on the hypervisor. [ 985.299217] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 985.299470] env[68233]: DEBUG nova.compute.manager [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 985.299626] env[68233]: DEBUG nova.network.neutron [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 985.446953] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 985.611569] env[68233]: DEBUG nova.network.neutron [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updating instance_info_cache with network_info: [{"id": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "address": "fa:16:3e:ef:bf:2c", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap019c95d6-3c", "ovs_interfaceid": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.682200] env[68233]: DEBUG nova.compute.utils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 985.687181] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 985.687313] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 985.693601] env[68233]: DEBUG nova.compute.manager [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 985.693941] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 985.695775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641efca7-1c65-4d26-8e29-27823e967429 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.707962] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 985.709619] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-754db820-f55a-4d39-ae67-fcfdf642bc5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.718096] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 985.718096] env[68233]: value = "task-2782767" [ 985.718096] env[68233]: _type = "Task" [ 985.718096] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.738071] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.750074] env[68233]: DEBUG nova.policy [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 986.109731] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Successfully created port: 5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 986.117476] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.117802] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance network_info: |[{"id": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "address": "fa:16:3e:ef:bf:2c", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap019c95d6-3c", "ovs_interfaceid": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.118269] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:bf:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '019c95d6-3ce8-430d-9fe1-c9d866f5f2cd', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.126170] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.126395] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.126616] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1082053-e221-4add-9256-3bb7444f97e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.154488] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.154488] env[68233]: value = "task-2782768" [ 986.154488] env[68233]: _type = "Task" [ 986.154488] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.165018] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782768, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.201501] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 986.234062] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782767, 'name': PowerOffVM_Task, 'duration_secs': 0.23946} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.235679] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 986.236169] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 986.236272] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5afc67f-49e0-4cfd-9b79-843c56c83a4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.241345] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.241444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.242087] env[68233]: DEBUG nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 986.244138] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9976aa-471b-4fbb-8bf8-5b09cedbee8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.254848] env[68233]: DEBUG nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 986.376034] env[68233]: DEBUG nova.compute.manager [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received event network-changed-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 986.376034] env[68233]: DEBUG nova.compute.manager [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] [instance: 
d4b69710-7f74-4755-8783-63e36c67f57a] Refreshing instance network info cache due to event network-changed-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 986.376195] env[68233]: DEBUG oslo_concurrency.lockutils [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] Acquiring lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.376336] env[68233]: DEBUG oslo_concurrency.lockutils [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] Acquired lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.376503] env[68233]: DEBUG nova.network.neutron [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Refreshing network info cache for port 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.544242] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155ba69b-d5b7-4a98-961c-1a18c473e3cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.552790] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf6fb6a-8c89-4a63-9225-e68dc2f92c2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.588879] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c05d361-0542-451d-841a-191060c3e195 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.598762] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c799f7-3ae5-4be2-adf3-286e61449190 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.612674] env[68233]: DEBUG nova.compute.provider_tree [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.663790] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782768, 'name': CreateVM_Task, 'duration_secs': 0.444053} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.663981] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 986.664702] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.664869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.665190] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 986.665442] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f6ecc00-7de1-4e18-9a71-4a32fa07c94d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.669894] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 986.669894] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d3312-7231-8e03-b4b2-c2acc67ed3d7" [ 986.669894] env[68233]: _type = "Task" [ 986.669894] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.679208] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d3312-7231-8e03-b4b2-c2acc67ed3d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.712272] env[68233]: DEBUG nova.network.neutron [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 986.760675] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 986.761225] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-21ee1b1e-97cd-43d4-b212-1b8f6a966b2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.771464] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 986.771464] env[68233]: value = "task-2782770" [ 986.771464] env[68233]: _type = "Task" [ 986.771464] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.780219] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782770, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.966565] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "d926386c-8543-4a6e-a782-588680cb5f34" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.966907] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.115581] env[68233]: DEBUG nova.scheduler.client.report [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.184590] env[68233]: 
DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d3312-7231-8e03-b4b2-c2acc67ed3d7, 'name': SearchDatastore_Task, 'duration_secs': 0.017769} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.184590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.184590] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.184590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.184590] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.184590] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.184590] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3358e9c2-4b16-470b-ad24-912adafac5e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.193190] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.194359] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.194523] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0052c87d-0e65-43c0-b0d5-aada5504998c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.201496] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 987.201496] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52557331-a616-cacc-15d5-e8a8210cb6aa" [ 987.201496] env[68233]: _type = "Task" [ 987.201496] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.210270] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52557331-a616-cacc-15d5-e8a8210cb6aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.214463] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 987.215230] env[68233]: INFO nova.compute.manager [-] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Took 1.92 seconds to deallocate network for instance. [ 987.242645] env[68233]: DEBUG nova.network.neutron [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updated VIF entry in instance network info cache for port 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.243162] env[68233]: DEBUG nova.network.neutron [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updating instance_info_cache with network_info: [{"id": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "address": "fa:16:3e:ef:bf:2c", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap019c95d6-3c", "ovs_interfaceid": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.251716] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 987.251976] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 987.252161] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 987.252347] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 987.254283] env[68233]: DEBUG nova.virt.hardware [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 987.254693] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281cbfbf-d0a5-42d2-ba7f-5591d5cc0277 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.264462] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69b2a4df-92c1-43ed-b68f-170290021323 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.292190] env[68233]: DEBUG oslo_vmware.api [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782770, 'name': PowerOffVM_Task, 'duration_secs': 0.224132} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.292488] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 987.292633] env[68233]: DEBUG nova.compute.manager [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 987.293437] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951863b5-48b9-468c-9c33-322d8395b55c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.471417] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 987.592240] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.592521] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.619606] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.447s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.620256] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.714076] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52557331-a616-cacc-15d5-e8a8210cb6aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010763} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.714915] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5774a639-1e8a-4d6d-b025-e8b78f8e38da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.721569] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 987.721569] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952417-186a-a0e5-96b9-d6b0d1446cff" [ 987.721569] env[68233]: _type = "Task" [ 987.721569] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.727905] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.728231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.728466] env[68233]: DEBUG nova.objects.instance [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'resources' on Instance uuid 73ca71c0-34cd-4393-82ff-4b297d350209 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 987.735278] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952417-186a-a0e5-96b9-d6b0d1446cff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.746278] env[68233]: DEBUG oslo_concurrency.lockutils [req-56f2512e-c582-408a-ad51-795a00979429 req-f36f7158-138f-4f22-99aa-45a2902226fc service nova] Releasing lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.774387] env[68233]: DEBUG nova.compute.manager [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Received event network-vif-plugged-5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 987.774598] env[68233]: DEBUG oslo_concurrency.lockutils [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] Acquiring lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.774883] env[68233]: DEBUG oslo_concurrency.lockutils [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.775325] env[68233]: DEBUG oslo_concurrency.lockutils [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.775634] env[68233]: DEBUG nova.compute.manager [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] No waiting events found dispatching network-vif-plugged-5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 987.777063] env[68233]: WARNING nova.compute.manager [req-39508787-72b7-40d1-af09-d9ff1448d958 req-f52017f1-e085-4311-a690-ff228940a2e7 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Received unexpected event network-vif-plugged-5d6c2240-95b2-4748-bb07-13b4092e246a for instance with vm_state building and task_state spawning. 
[ 987.809244] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.567s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.848643] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Successfully updated port: 5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.002683] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.098021] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.128471] env[68233]: DEBUG nova.compute.utils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 988.129903] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 988.130087] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 988.175510] env[68233]: DEBUG nova.policy [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65225f2affe34ceda9a265989bddfc9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74a353ea173c4b8bb74b84032d4e12b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 988.235749] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52952417-186a-a0e5-96b9-d6b0d1446cff, 'name': SearchDatastore_Task, 'duration_secs': 0.010725} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.236287] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.236572] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d4b69710-7f74-4755-8783-63e36c67f57a/d4b69710-7f74-4755-8783-63e36c67f57a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.236855] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59e067d3-c7a2-435f-830a-7ebec1f25abc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.244773] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 988.244773] env[68233]: value = "task-2782771" [ 988.244773] env[68233]: _type = "Task" [ 988.244773] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.253847] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782771, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.317555] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.319789] env[68233]: INFO nova.compute.manager [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Rebuilding instance [ 988.353406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.353406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.353406] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 988.372840] env[68233]: DEBUG nova.compute.manager [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 988.373075] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6b6652-0370-427d-8350-5b9f45fe1c8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.408215] env[68233]: DEBUG nova.compute.manager [req-90ccadc8-6e40-47e2-a11d-ffa1fb483e9d req-cc615ba9-0509-45ab-8e5b-28d02275347c service nova] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Received event network-vif-deleted-e331f25d-7ad6-4a4d-af3e-0b2fd1baf8de {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 988.478039] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Successfully created port: 4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 988.527521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b797958-3ea4-4783-977d-17e8cb096eba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.537199] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e274c4de-3332-46b9-a2bd-da4dd68404a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.576161] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8ee747-50dc-4c55-92a2-b0edba9538d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.587059] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fb2cb9-ba53-435e-af83-97c0b3af4d5a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.604688] env[68233]: DEBUG nova.compute.provider_tree [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 988.624336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.633963] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 988.757569] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782771, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509097} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.757842] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d4b69710-7f74-4755-8783-63e36c67f57a/d4b69710-7f74-4755-8783-63e36c67f57a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.758100] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.758383] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33f1fdb3-7550-403d-8dae-b0ba41ea3b8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.765097] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 988.765097] env[68233]: value = "task-2782772" [ 988.765097] env[68233]: _type = "Task" [ 988.765097] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.774796] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782772, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.809156] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.809439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.809730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "9c0e581d-5856-470f-a737-301649d701e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.809927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.810123] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.812384] env[68233]: INFO nova.compute.manager [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Terminating instance [ 988.896066] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.097081] env[68233]: DEBUG nova.network.neutron [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Updating instance_info_cache with network_info: [{"id": "5d6c2240-95b2-4748-bb07-13b4092e246a", "address": "fa:16:3e:7e:b3:bd", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6c2240-95", "ovs_interfaceid": "5d6c2240-95b2-4748-bb07-13b4092e246a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.112534] env[68233]: DEBUG nova.scheduler.client.report [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.277651] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782772, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064827} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.277931] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.278803] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c34d6b-d39b-4f4b-909b-b1672aba76ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.302724] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] d4b69710-7f74-4755-8783-63e36c67f57a/d4b69710-7f74-4755-8783-63e36c67f57a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.304072] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c74969fd-6844-4104-a9fc-2319cb608770 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.318014] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.318301] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.318442] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleting the datastore file [datastore2] edf4bfac-175b-40b7-bf08-298c4735bfae {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.319363] env[68233]: DEBUG nova.compute.manager [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 989.319551] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.319807] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2ef6f93-7e8a-432f-a94e-fc25799048b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.322294] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643265fe-5bb1-44f5-8e33-a9a3709beafb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.329633] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for the task: (returnval){ [ 989.329633] env[68233]: value = "task-2782773" [ 989.329633] env[68233]: _type = "Task" [ 989.329633] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.332279] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 989.332279] env[68233]: value = "task-2782774" [ 989.332279] env[68233]: _type = "Task" [ 989.332279] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.332522] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.335620] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44748eff-41b6-4e9a-8f88-e2b5ea781c93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.344588] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.348531] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782774, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.392636] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.393077] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-770905d4-ff8c-4bbe-9652-b114401a4def {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.400727] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 989.400727] env[68233]: value = "task-2782776" [ 989.400727] env[68233]: _type = "Task" [ 989.400727] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.412231] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.416662] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 989.416956] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 989.417236] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] 9c0e581d-5856-470f-a737-301649d701e5 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 989.417564] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-560909b0-48b0-4f13-8ea0-31615f1ec388 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.424192] env[68233]: DEBUG oslo_vmware.api [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 989.424192] env[68233]: value = "task-2782777" [ 989.424192] env[68233]: _type = "Task" [ 989.424192] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.432553] env[68233]: DEBUG oslo_vmware.api [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782777, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.551102] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 989.552199] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e066264d-d49b-44bd-a545-bdbaddb58afc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.558913] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 989.559168] env[68233]: ERROR oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk due to incomplete transfer. [ 989.559451] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-38078bd2-c350-4810-97e1-a7f1a785d912 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.567346] env[68233]: DEBUG oslo_vmware.rw_handles [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526bc4a2-c94e-a215-b694-128f4ad4ea1c/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 989.567572] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Uploaded image d59a191b-5df7-4078-ba81-330dce0e225b to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 989.570373] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 989.570662] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2e40afaa-5d85-4327-9aa2-6d01e4342970 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.576398] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 989.576398] env[68233]: value = "task-2782778" [ 989.576398] env[68233]: _type = "Task" [ 989.576398] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.584501] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782778, 'name': Destroy_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.599188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.599505] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Instance network_info: |[{"id": "5d6c2240-95b2-4748-bb07-13b4092e246a", "address": "fa:16:3e:7e:b3:bd", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6c2240-95", "ovs_interfaceid": "5d6c2240-95b2-4748-bb07-13b4092e246a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 989.600023] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:b3:bd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d6c2240-95b2-4748-bb07-13b4092e246a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 989.607680] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.607989] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 989.608161] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c8a07831-fe43-4a1c-a240-c8ecb5eadf79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.623877] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.896s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 989.626473] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.624s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 989.628018] env[68233]: INFO nova.compute.claims [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 989.635557] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 989.635557] env[68233]: value = "task-2782779" [ 989.635557] env[68233]: _type = "Task" [ 989.635557] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.642948] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 989.648291] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782779, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.649295] env[68233]: INFO nova.scheduler.client.report [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted allocations for instance 73ca71c0-34cd-4393-82ff-4b297d350209 [ 989.668034] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 989.668838] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 989.668838] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 989.668838] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 989.668838] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 989.669182] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 989.669182] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 989.669327] env[68233]: DEBUG nova.virt.hardware 
[None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 989.669536] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 989.669705] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 989.669960] env[68233]: DEBUG nova.virt.hardware [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 989.671028] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c532f90c-af5a-4590-b8db-066c37a53809 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.680026] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ce9b5c-6b72-4b5c-9bc1-066846001aac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.809887] env[68233]: DEBUG nova.compute.manager [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Received event network-changed-5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 989.810042] env[68233]: DEBUG nova.compute.manager [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Refreshing instance network info cache due to event network-changed-5d6c2240-95b2-4748-bb07-13b4092e246a. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 989.810218] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] Acquiring lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.810364] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] Acquired lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.810523] env[68233]: DEBUG nova.network.neutron [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Refreshing network info cache for port 5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 989.848122] env[68233]: DEBUG oslo_vmware.api [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Task: {'id': task-2782773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215679} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.851219] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.851420] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.851597] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.851831] env[68233]: INFO nova.compute.manager [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Took 4.16 seconds to destroy the instance on the hypervisor. [ 989.852095] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.852300] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782774, 'name': ReconfigVM_Task, 'duration_secs': 0.382164} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.852583] env[68233]: DEBUG nova.compute.manager [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.852707] env[68233]: DEBUG nova.network.neutron [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 989.855081] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Reconfigured VM instance instance-00000059 to attach disk [datastore2] d4b69710-7f74-4755-8783-63e36c67f57a/d4b69710-7f74-4755-8783-63e36c67f57a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.855990] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4fffc7f1-4ef9-4a5e-a0ae-7651857b87b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.862845] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 989.862845] env[68233]: value = "task-2782780" [ 989.862845] env[68233]: _type = "Task" [ 989.862845] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.874565] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782780, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.913805] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782776, 'name': PowerOffVM_Task, 'duration_secs': 0.245109} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.914333] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 989.914681] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.915729] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de3a513-128d-48a8-9592-30b24172bfef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.924310] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 989.924310] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1a01a55-9890-4169-aed7-7e14f0f5f1ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.941961] env[68233]: DEBUG oslo_vmware.api [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184564} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.942439] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 989.942787] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 989.943119] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 989.943738] env[68233]: INFO nova.compute.manager [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 989.944114] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 989.944468] env[68233]: DEBUG nova.compute.manager [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 989.944721] env[68233]: DEBUG nova.network.neutron [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 990.004122] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.005176] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.005176] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleting the datastore file [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.006134] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9dd52b4-5e66-4977-9d35-3397ffa0cd7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.014022] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 990.014022] env[68233]: value = "task-2782782" [ 990.014022] env[68233]: _type = "Task" [ 990.014022] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.020645] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782782, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.036195] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Successfully updated port: 4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.086972] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782778, 'name': Destroy_Task, 'duration_secs': 0.378115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.087541] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Destroyed the VM [ 990.088082] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 990.088485] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-78400fe1-28b2-4f4a-9203-ef513793f2bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.097076] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 990.097076] env[68233]: value = "task-2782783" [ 990.097076] env[68233]: _type = "Task" [ 990.097076] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.103926] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782783, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.158151] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782779, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.158151] env[68233]: DEBUG oslo_concurrency.lockutils [None req-35846aba-7858-4fdd-b069-303edaa97812 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "73ca71c0-34cd-4393-82ff-4b297d350209" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.188s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.376764] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782780, 'name': Rename_Task, 'duration_secs': 0.396878} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.377156] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 990.377451] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c8767808-c517-4022-b0e0-d301c6e334a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.387104] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 990.387104] env[68233]: value = "task-2782784" [ 990.387104] env[68233]: _type = "Task" [ 990.387104] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.396838] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782784, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.525133] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.323469} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.525551] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 990.525960] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 990.526273] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 990.541929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.541929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.542067] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 990.591859] env[68233]: DEBUG nova.network.neutron [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Updated VIF entry in instance network info cache for port 5d6c2240-95b2-4748-bb07-13b4092e246a. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 990.595560] env[68233]: DEBUG nova.network.neutron [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Updating instance_info_cache with network_info: [{"id": "5d6c2240-95b2-4748-bb07-13b4092e246a", "address": "fa:16:3e:7e:b3:bd", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6c2240-95", "ovs_interfaceid": "5d6c2240-95b2-4748-bb07-13b4092e246a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.607742] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782783, 'name': RemoveSnapshot_Task, 'duration_secs': 0.366748} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.608601] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 990.608877] env[68233]: DEBUG nova.compute.manager [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.609957] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-621a7f90-d2fe-4f86-b5cd-3f7e449e3410 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.650444] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782779, 'name': CreateVM_Task, 'duration_secs': 0.667314} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.650444] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.650444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.650444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.650444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 990.650444] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3572a4f9-a312-489f-bce6-bc3effed62ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.657559] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 990.657559] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fcdf21-f8d2-1838-ab60-f31fd3aac072" [ 990.657559] env[68233]: _type = "Task" [ 990.657559] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.664561] env[68233]: DEBUG nova.compute.manager [req-f971969f-3699-4bc8-93ba-356b1bfa0645 req-412e5adf-1d2e-4bea-b294-aed276f189de service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Received event network-vif-deleted-bf0551bd-3228-40bf-84cb-a459a20639b8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 990.664827] env[68233]: INFO nova.compute.manager [req-f971969f-3699-4bc8-93ba-356b1bfa0645 req-412e5adf-1d2e-4bea-b294-aed276f189de service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Neutron deleted interface bf0551bd-3228-40bf-84cb-a459a20639b8; detaching it from the instance and deleting it from the info cache [ 990.665007] env[68233]: DEBUG nova.network.neutron [req-f971969f-3699-4bc8-93ba-356b1bfa0645 req-412e5adf-1d2e-4bea-b294-aed276f189de service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.673579] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fcdf21-f8d2-1838-ab60-f31fd3aac072, 'name': SearchDatastore_Task, 'duration_secs': 0.00991} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.674238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.674468] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 990.674714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.674881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.675162] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
990.675553] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-45b17e15-aec9-4ac9-8fff-2573626c6571 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.689280] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 990.692622] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 990.692622] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b1c1fc-6fe8-4762-a0d9-f38068a3f571 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.695519] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 990.695519] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5277583e-ef91-cd20-2078-b3dff12a79fc" [ 990.695519] env[68233]: _type = "Task" [ 990.695519] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.703723] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5277583e-ef91-cd20-2078-b3dff12a79fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.899404] env[68233]: DEBUG oslo_vmware.api [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782784, 'name': PowerOnVM_Task, 'duration_secs': 0.49201} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.899714] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.899940] env[68233]: INFO nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Took 6.77 seconds to spawn the instance on the hypervisor. 
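The task handling above (CreateVM_Task, SearchDatastore_Task, PowerOnVM_Task) follows the same poll-until-done pattern: wait_for_task hands back a vCenter task reference and _poll_task re-reads its state until it reports success or error. A minimal Python sketch of that pattern, illustrative only — fetch_task_info is a hypothetical stand-in for the PropertyCollector read that oslo.vmware performs, not its real API:

import time

def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5):
    """Poll a vCenter task reference until it reports success or error."""
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_ref)  # hypothetical: returns {'state': ..., 'progress': ..., 'error': ...}
        if info["state"] == "success":
            info["duration_secs"] = round(time.monotonic() - start, 6)
            return info  # corresponds to the "completed successfully" lines with duration_secs
        if info["state"] == "error":
            raise RuntimeError("Task %s failed: %s" % (task_ref, info.get("error")))
        # still queued/running: this is where the "progress is N%" lines are emitted
        time.sleep(poll_interval)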
[ 990.900138] env[68233]: DEBUG nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 990.902248] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a244485-1955-4060-a7b9-93649facbfd5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.905256] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b9aff0-7649-4bf1-9d77-8002f3e24dc8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.917746] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ca4095-c730-4a34-91a0-382092731b61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.954535] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33232c40-b701-41c4-9f97-b94043bd554f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.963281] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e142edf8-d122-420f-bf6b-a5934eab7432 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.978122] env[68233]: DEBUG nova.compute.provider_tree [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.098673] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d7d1742-9c89-4301-996d-23c7ebbfcb71 req-c16a7781-39f0-449c-a5a5-d1d1e7592713 service nova] Releasing lock "refresh_cache-56fb49f0-4b2b-4501-8ded-34dff1278a0c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.111196] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.122303] env[68233]: INFO nova.compute.manager [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Shelve offloading [ 991.145730] env[68233]: DEBUG nova.network.neutron [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.149467] env[68233]: DEBUG nova.network.neutron [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.175643] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b13f1d6-32de-4a04-a89b-c3f1f91e1d05 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.188021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82320944-67fc-4290-9277-8051b6c8215b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.212745] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5277583e-ef91-cd20-2078-b3dff12a79fc, 'name': SearchDatastore_Task, 'duration_secs': 0.009031} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.213718] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d2c2ac6-8284-4615-92fa-96b02a1b3759 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.226869] env[68233]: DEBUG nova.compute.manager [req-f971969f-3699-4bc8-93ba-356b1bfa0645 req-412e5adf-1d2e-4bea-b294-aed276f189de service nova] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Detach interface failed, port_id=bf0551bd-3228-40bf-84cb-a459a20639b8, reason: Instance edf4bfac-175b-40b7-bf08-298c4735bfae could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 991.232730] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 991.232730] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9de44-87f4-415d-7783-223914370d02" [ 991.232730] env[68233]: _type = "Task" [ 991.232730] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.239983] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9de44-87f4-415d-7783-223914370d02, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.354379] env[68233]: DEBUG nova.network.neutron [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Updating instance_info_cache with network_info: [{"id": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "address": "fa:16:3e:8e:c7:7a", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4836d496-c0", "ovs_interfaceid": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.423361] env[68233]: INFO nova.compute.manager [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Took 12.59 seconds to build instance. 
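The instance_info_cache updates logged above carry the full network_info structure as a list of VIF dicts. A small illustrative reader (not Nova code) showing which fields those entries expose, using only keys present in the logged data:

def summarize_vif(vif):
    """Pull the commonly inspected fields out of one network_info VIF entry."""
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "segmentation_id": vif["details"].get("segmentation_id"),
        "devname": vif.get("devname"),
    }

For the entry above this yields port 4836d496-c0c3-42a6-8b3c-e86a6660174e, MAC fa:16:3e:8e:c7:7a, fixed IP 192.168.128.3, segmentation_id 100 and devname tap4836d496-c0.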
[ 991.481695] env[68233]: DEBUG nova.scheduler.client.report [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.565914] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.566213] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.566343] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.566524] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.566666] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.566815] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.567046] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 
tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.567206] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.567371] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.567528] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.567697] env[68233]: DEBUG nova.virt.hardware [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.568869] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe439f50-21f2-4e01-bfae-77ae4df5ac6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.577061] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ffa882b-dff0-42bf-af6a-26e2e0154ab6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.592201] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:8a:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef237162-2628-4a17-9afd-7a418911f222', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.598661] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 991.599357] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.599357] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32176a2e-0111-4c57-b07d-4218ea34ccb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.619061] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.619061] env[68233]: value = "task-2782785" [ 991.619061] env[68233]: _type = "Task" [ 991.619061] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.627198] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 991.627414] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782785, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.627875] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ce929f2f-9e69-446f-97ff-d9fca8098151 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.632391] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 991.632391] env[68233]: value = "task-2782786" [ 991.632391] env[68233]: _type = "Task" [ 991.632391] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.642100] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782786, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.648947] env[68233]: INFO nova.compute.manager [-] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Took 1.80 seconds to deallocate network for instance. [ 991.651963] env[68233]: INFO nova.compute.manager [-] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Took 1.71 seconds to deallocate network for instance. [ 991.743117] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e9de44-87f4-415d-7783-223914370d02, 'name': SearchDatastore_Task, 'duration_secs': 0.0285} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.744103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.744103] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 56fb49f0-4b2b-4501-8ded-34dff1278a0c/56fb49f0-4b2b-4501-8ded-34dff1278a0c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.744103] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a2b34208-f639-41a5-96ba-2a748d4de846 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.750179] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 991.750179] env[68233]: value = "task-2782787" [ 991.750179] env[68233]: _type = "Task" [ 991.750179] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.757990] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782787, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.856983] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.857246] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance network_info: |[{"id": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "address": "fa:16:3e:8e:c7:7a", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4836d496-c0", "ovs_interfaceid": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 991.857761] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:c7:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4836d496-c0c3-42a6-8b3c-e86a6660174e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.865598] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating folder: Project (74a353ea173c4b8bb74b84032d4e12b0). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.865598] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5141853f-6cfc-40ba-9718-4a6d46663263 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.876096] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created folder: Project (74a353ea173c4b8bb74b84032d4e12b0) in parent group-v559223. [ 991.876299] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating folder: Instances. Parent ref: group-v559465. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 991.876541] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4fa35eb1-07f0-48ea-bf40-ad7d85a88744 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.885840] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created folder: Instances in parent group-v559465. [ 991.886287] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 991.886545] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.886814] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da917c3f-7c77-46dd-a9dc-4e01d8cb05dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.907516] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.907516] env[68233]: value = "task-2782790" [ 991.907516] env[68233]: _type = "Task" [ 991.907516] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.916346] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782790, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.929023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-22d70de2-3957-4b8e-80dd-47491c654290 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.099s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.986614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.987180] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 991.990699] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.673s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.990890] env[68233]: DEBUG nova.objects.instance [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 991.994140] env[68233]: DEBUG nova.compute.manager [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Received event network-vif-plugged-4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 991.994392] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] Acquiring lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 991.994527] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.994819] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c 
service nova] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.994984] env[68233]: DEBUG nova.compute.manager [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] No waiting events found dispatching network-vif-plugged-4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 991.995178] env[68233]: WARNING nova.compute.manager [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Received unexpected event network-vif-plugged-4836d496-c0c3-42a6-8b3c-e86a6660174e for instance with vm_state building and task_state spawning. [ 991.995354] env[68233]: DEBUG nova.compute.manager [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Received event network-changed-4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 991.995486] env[68233]: DEBUG nova.compute.manager [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Refreshing instance network info cache due to event network-changed-4836d496-c0c3-42a6-8b3c-e86a6660174e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 991.995669] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] Acquiring lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.995804] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] Acquired lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.995972] env[68233]: DEBUG nova.network.neutron [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Refreshing network info cache for port 4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.129510] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782785, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.143590] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 992.144140] env[68233]: DEBUG nova.compute.manager [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.144857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c417b75-9c8b-4290-984d-d26a4b797459 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.151156] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.151357] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.151528] env[68233]: DEBUG nova.network.neutron [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.160870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.161785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.260273] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782787, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.417316] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782790, 'name': CreateVM_Task, 'duration_secs': 0.498714} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.417466] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.418188] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.418352] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.418668] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 992.418916] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b1bb1cb-117e-409b-8540-7b033fce8e3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.423480] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 992.423480] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188cb7-d2c9-6312-5fad-4c385bd3c3f7" [ 992.423480] env[68233]: _type = "Task" [ 992.423480] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.431811] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188cb7-d2c9-6312-5fad-4c385bd3c3f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.499391] env[68233]: DEBUG nova.compute.utils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 992.504730] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 992.504921] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 992.531136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.531388] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.560070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.560329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.560508] env[68233]: INFO nova.compute.manager [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Shelving [ 992.608827] env[68233]: DEBUG nova.policy [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '309fbc1ccdf44918a272d8cd64c63af0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61a69000592d412587562d2d0f890515', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 992.629808] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782785, 'name': CreateVM_Task, 'duration_secs': 0.703663} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.630100] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 992.631085] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.762560] env[68233]: DEBUG nova.compute.manager [req-1cf9314c-240b-41a1-b030-c70960f9b57b req-6d421018-f845-4116-9528-6f128929321e service nova] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Received event network-vif-deleted-33a2fb19-ac15-4669-ba90-af5e70070de2 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 992.768326] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782787, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.660856} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.768326] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 56fb49f0-4b2b-4501-8ded-34dff1278a0c/56fb49f0-4b2b-4501-8ded-34dff1278a0c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.768495] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.768769] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e2f1684-15f8-4f24-a6f0-232c5677931a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.780547] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 992.780547] env[68233]: value = "task-2782791" [ 992.780547] env[68233]: _type = "Task" [ 992.780547] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.788830] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782791, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.877132] env[68233]: DEBUG nova.network.neutron [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.922645] env[68233]: DEBUG nova.network.neutron [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Updated VIF entry in instance network info cache for port 4836d496-c0c3-42a6-8b3c-e86a6660174e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 992.923107] env[68233]: DEBUG nova.network.neutron [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Updating instance_info_cache with network_info: [{"id": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "address": "fa:16:3e:8e:c7:7a", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4836d496-c0", "ovs_interfaceid": "4836d496-c0c3-42a6-8b3c-e86a6660174e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.937249] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188cb7-d2c9-6312-5fad-4c385bd3c3f7, 'name': SearchDatastore_Task, 'duration_secs': 0.027405} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.938163] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.938441] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.938680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.938825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.938997] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.939300] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.939597] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 992.940100] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2440857-784a-489f-9d65-f138f75df541 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.942033] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b3241d9-7d4d-4a06-b3bb-1ef95f1755e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.948669] env[68233]: DEBUG 
oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 992.948669] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c08a9c-842a-dd59-4656-ab47a9997c78" [ 992.948669] env[68233]: _type = "Task" [ 992.948669] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.953699] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.953873] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.957112] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8b0f9ee-319c-4c50-9c7a-a9e538a6e2c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.959157] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c08a9c-842a-dd59-4656-ab47a9997c78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.962333] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 992.962333] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d0aa94-7bff-47ad-a215-5103e74db95a" [ 992.962333] env[68233]: _type = "Task" [ 992.962333] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.970474] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d0aa94-7bff-47ad-a215-5103e74db95a, 'name': SearchDatastore_Task} progress is 0%. 
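The "Waiting for the task: (returnval){ ... _type = "Task" }" blocks above come from oslo.vmware's wait_for_task, which keeps polling SearchDatastore_Task, MakeDirectory and the rest until vCenter reports completion; the duration_secs values in the follow-up records are the measured wait. A self-contained sketch of that poll-until-done pattern, using toy stand-ins rather than the oslo.vmware API itself:

    import time

    # Toy stand-in for a wait_for_task-style helper: poll a task's state at a
    # fixed interval and return the elapsed time once it reports success.
    def wait_for_task(poll_task_state, task_poll_interval=0.5, timeout=60.0):
        start = time.monotonic()
        deadline = start + timeout
        while True:
            state, _progress = poll_task_state()      # e.g. ("running", 0)
            if state == "success":
                return time.monotonic() - start       # the duration_secs in the log
            if state == "error":
                raise RuntimeError("task failed")
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete in time")
            time.sleep(task_poll_interval)

    # Usage with a fake task that finishes on the third poll:
    states = iter([("running", 0), ("running", 50), ("success", 100)])
    print("duration_secs=%.3f" % wait_for_task(lambda: next(states), 0.01))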
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.006133] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3ee683e8-e10b-4412-b3dd-d393ed0608d0 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.007798] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 993.010237] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.386s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.011837] env[68233]: INFO nova.compute.claims [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 993.033509] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 993.062256] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Successfully created port: 6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 993.289974] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.37947} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.290272] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.290921] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbd5ab9-c5d5-426e-a3b6-69af3baa8fea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.312723] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Reconfiguring VM instance instance-0000005a to attach disk [datastore2] 56fb49f0-4b2b-4501-8ded-34dff1278a0c/56fb49f0-4b2b-4501-8ded-34dff1278a0c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.313053] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3942a04f-c9cb-4f0c-abe1-e4dc23abcce1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.336591] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 993.336591] env[68233]: value = "task-2782792" [ 993.336591] env[68233]: _type = "Task" [ 993.336591] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.345203] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782792, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.380113] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.428650] env[68233]: DEBUG oslo_concurrency.lockutils [req-5a98bc50-96b1-49fe-943e-6c84bf9d6710 req-7e19cbee-59ba-43b3-860d-97aaf4dc234c service nova] Releasing lock "refresh_cache-85313d15-04da-4f24-b203-bed5ebcbe1a9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.460402] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c08a9c-842a-dd59-4656-ab47a9997c78, 'name': SearchDatastore_Task, 'duration_secs': 0.022543} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.460684] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.460907] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.461124] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.471810] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d0aa94-7bff-47ad-a215-5103e74db95a, 'name': SearchDatastore_Task, 'duration_secs': 0.042247} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.476619] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3393b23-3953-4305-a500-333378a81852 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.482480] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 993.482480] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d9ce3-5db5-6219-e9e1-a14cf5bb498a" [ 993.482480] env[68233]: _type = "Task" [ 993.482480] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.490453] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d9ce3-5db5-6219-e9e1-a14cf5bb498a, 'name': SearchDatastore_Task} progress is 0%. 
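The Acquiring/Acquired/Releasing lines above show two concurrent builds serializing on the cached image vmdk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/... so that only one request populates the cache at a time. A rough sketch of that pattern, assuming oslo.concurrency's lockutils.lock context manager; the lock name mirrors the one in the log and the body is a placeholder, not Nova's _fetch_image_if_missing:

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore2] devstack-image-cache_base/"
                  "da133fda-e1e2-42a1-a7e0-b8b1426a8490/"
                  "da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk")

    def fetch_image_if_missing(copy_to_instance_dir):
        # Serialize on the cached image vmdk; concurrent builds of the same
        # image block here, as the lock records above show.
        with lockutils.lock(CACHE_VMDK):
            copy_to_instance_dir()

    fetch_image_if_missing(lambda: print("copying cached vmdk..."))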
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.557315] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.572764] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.573334] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1f19acb-5d85-4d1d-939b-0d2d12878782 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.581039] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 993.581039] env[68233]: value = "task-2782793" [ 993.581039] env[68233]: _type = "Task" [ 993.581039] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.591570] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.711906] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 993.712976] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb55e38-b84b-4cae-b727-d75357fb07ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.721185] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 993.721494] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-577bf65f-ce36-4f1e-b47d-0e994a50b71f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.785975] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 993.786726] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 993.789017] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleting the datastore file [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 993.789017] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e9aa02e-b730-46b1-9542-f05f9574b224 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.795267] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 993.795267] env[68233]: value = "task-2782795" [ 993.795267] env[68233]: _type = "Task" [ 993.795267] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.801898] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782795, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.846455] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782792, 'name': ReconfigVM_Task, 'duration_secs': 0.341427} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.846822] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Reconfigured VM instance instance-0000005a to attach disk [datastore2] 56fb49f0-4b2b-4501-8ded-34dff1278a0c/56fb49f0-4b2b-4501-8ded-34dff1278a0c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 993.847561] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87fccc74-4fb5-46e2-8f2a-d1aa85d298e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.853481] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 993.853481] env[68233]: value = "task-2782796" [ 993.853481] env[68233]: _type = "Task" [ 993.853481] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.861191] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782796, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.993766] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520d9ce3-5db5-6219-e9e1-a14cf5bb498a, 'name': SearchDatastore_Task, 'duration_secs': 0.01073} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.994049] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.994311] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 993.994589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.994772] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.995027] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4644dc06-84f4-4c29-8f05-f94bd48d94fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.996993] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64af89d8-a647-4b3c-a9d7-63c0b15bbbd5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.003077] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 994.003077] env[68233]: value = "task-2782797" [ 994.003077] env[68233]: _type = "Task" [ 994.003077] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.006608] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.006777] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.007939] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96f22e8e-2562-4813-93d3-3d8a87d3459f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.012447] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.015310] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 994.015310] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209633e-dc6f-f63c-dc69-1da58600544c" [ 994.015310] env[68233]: _type = "Task" [ 994.015310] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.025608] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 994.027458] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209633e-dc6f-f63c-dc69-1da58600544c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.054430] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 994.054831] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 994.054951] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 994.055178] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 994.055330] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 994.055478] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 994.055685] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 994.055866] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
994.056046] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 994.056214] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 994.056387] env[68233]: DEBUG nova.virt.hardware [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 994.058167] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb43d1a-f14f-4014-bc71-049073ddc80f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.067771] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f730b7f8-f1f2-4413-8198-bf999c83cdae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.092268] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782793, 'name': PowerOffVM_Task, 'duration_secs': 0.173404} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.092536] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.093348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391ad55e-a06f-4895-a1b7-f40c2336bd95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.119083] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca245fd-2c4a-42f1-adb1-5c8fc560f02c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.305416] env[68233]: DEBUG oslo_vmware.api [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782795, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134556} completed successfully. 
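The nova.virt.hardware records above compute the guest CPU topology for the m1.nano flavor: with no flavor or image limits the maximums default to 65536 sockets/cores/threads, the only split of 1 vCPU is 1 socket x 1 core x 1 thread, and that is the topology that gets sorted first and chosen. A small self-contained enumeration of the same idea (a simplification for illustration, not the real nova.virt.hardware code):

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Enumerate every (sockets, cores, threads) split whose product equals
        # the vCPU count and respects the per-dimension maximums.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]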
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.308060] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 994.308280] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 994.308454] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 994.327072] env[68233]: INFO nova.scheduler.client.report [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted allocations for instance dca145c8-ed95-4dfb-9534-37035c75dafb [ 994.334513] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1674d60-36e5-4f18-849d-e4e360046977 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.342786] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cf76f0-0895-46f6-8bfd-605e73580254 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.376867] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad108fb-4bac-4649-ac0a-c6cc3bfc0590 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.385672] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782796, 'name': Rename_Task, 'duration_secs': 0.142578} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.388040] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.388346] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a425412-8f7e-4e86-b47b-1c2fd044ea08 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.391010] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c30a0686-aef6-4d95-8476-a410d0c12910 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.408118] env[68233]: DEBUG nova.compute.provider_tree [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.410898] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 994.410898] env[68233]: value = "task-2782798" [ 994.410898] env[68233]: _type = "Task" [ 994.410898] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.421411] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782798, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.512733] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782797, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466131} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.513119] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.513391] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.513687] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95613f7e-8993-4e7d-971b-4ccfbf531eb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.524036] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5209633e-dc6f-f63c-dc69-1da58600544c, 'name': SearchDatastore_Task, 'duration_secs': 0.007922} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.525736] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 994.525736] env[68233]: value = "task-2782799" [ 994.525736] env[68233]: _type = "Task" [ 994.525736] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.526048] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5db26470-f915-4979-b4a5-a5e6d8d88bd9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.533188] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 994.533188] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526ae6f4-5d2a-0111-d24c-e407f2f09524" [ 994.533188] env[68233]: _type = "Task" [ 994.533188] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.536237] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782799, 'name': ExtendVirtualDisk_Task} progress is 0%. 
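The CopyVirtualDisk/ExtendVirtualDisk records above are the usual spawn path: the cached image vmdk is copied from devstack-image-cache_base into the instance's own folder, then the copy is extended to the flavor's root size (the 1048576 in the log reads as KB, which matches the 1 GiB root disk of m1.nano). A small illustration of how those datastore paths fit together, using plain string handling rather than nova's ds_util helpers:

    def cached_image_path(datastore, image_id):
        # e.g. "[datastore2] devstack-image-cache_base/<image>/<image>.vmdk"
        return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"

    def instance_root_path(datastore, instance_uuid):
        # e.g. "[datastore2] <uuid>/<uuid>.vmdk"
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    image = "da133fda-e1e2-42a1-a7e0-b8b1426a8490"
    inst = "85313d15-04da-4f24-b203-bed5ebcbe1a9"
    src = cached_image_path("datastore2", image)
    dst = instance_root_path("datastore2", inst)
    root_gb = 1
    print(f"copy {src} -> {dst}, then extend to {root_gb * 1024 * 1024} KB")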
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.545645] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526ae6f4-5d2a-0111-d24c-e407f2f09524, 'name': SearchDatastore_Task, 'duration_secs': 0.007812} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.545928] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.546423] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.546710] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd729cd8-03fe-43da-9040-ac2fcad8bbaf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.553042] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 994.553042] env[68233]: value = "task-2782800" [ 994.553042] env[68233]: _type = "Task" [ 994.553042] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.560692] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782800, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.629598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 994.629945] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-95e07cb3-e4fb-4826-b370-5a79f350f568 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.637358] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 994.637358] env[68233]: value = "task-2782801" [ 994.637358] env[68233]: _type = "Task" [ 994.637358] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.645786] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782801, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.797049] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-vif-unplugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 994.797341] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.797586] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.797586] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.797858] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] No waiting events found dispatching network-vif-unplugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.797913] env[68233]: WARNING 
nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received unexpected event network-vif-unplugged-d9478083-21a3-4b61-ab65-e1281b8bac7b for instance with vm_state shelved_offloaded and task_state None. [ 994.798046] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 994.798218] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing instance network info cache due to event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 994.798399] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.798535] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.798821] env[68233]: DEBUG nova.network.neutron [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 994.831186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.913056] env[68233]: DEBUG nova.scheduler.client.report [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.926357] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782798, 'name': PowerOnVM_Task} progress is 90%. 
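The report-client record above carries the full placement inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c: 48 VCPU at allocation_ratio 4.0, 196590 MB of RAM with 512 MB reserved, and 400 GB of disk capped at 175 GB per allocation. The schedulable capacity placement derives from each entry is (total - reserved) * allocation_ratio; a quick check of those numbers:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,
                      "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530,
                      "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 175,
                      "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: schedulable={capacity:g}, max per allocation={inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400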
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.946864] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Successfully updated port: 6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 995.036957] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06724} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.037285] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.038191] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7253478f-7409-4b6c-b6d7-58edf4087428 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.061620] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.062545] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49e89b16-39c8-42e8-b33e-b9446f67a61f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.085405] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782800, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.086663] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 995.086663] env[68233]: value = "task-2782802" [ 995.086663] env[68233]: _type = "Task" [ 995.086663] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.147737] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782801, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.420772] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.421295] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 995.429701] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.267s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 995.429701] env[68233]: DEBUG nova.objects.instance [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lazy-loading 'resources' on Instance uuid edf4bfac-175b-40b7-bf08-298c4735bfae {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.429701] env[68233]: DEBUG oslo_vmware.api [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782798, 'name': PowerOnVM_Task, 'duration_secs': 0.565376} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.429701] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.429701] env[68233]: INFO nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Took 8.22 seconds to spawn the instance on the hypervisor. 
[ 995.429701] env[68233]: DEBUG nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.430495] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ba0fc8-0327-4fd6-8e36-c058c46c597a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.449089] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.449287] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.449366] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 995.574177] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782800, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55509} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.574500] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.574712] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.574999] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9d5acb3-5283-4576-8c79-04c052f42309 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.582176] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 995.582176] env[68233]: value = "task-2782803" [ 995.582176] env[68233]: _type = "Task" [ 995.582176] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.595347] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782803, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.600233] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782802, 'name': ReconfigVM_Task, 'duration_secs': 0.286451} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.600525] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 995.601151] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb141a55-493f-4de2-a471-f4f7da90e178 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.606742] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 995.606742] env[68233]: value = "task-2782804" [ 995.606742] env[68233]: _type = "Task" [ 995.606742] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.614722] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782804, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.648194] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782801, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.833782] env[68233]: DEBUG nova.network.neutron [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updated VIF entry in instance network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.834081] env[68233]: DEBUG nova.network.neutron [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd9478083-21", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.931485] env[68233]: DEBUG nova.compute.utils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.936041] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 995.936041] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.952818] env[68233]: INFO nova.compute.manager [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Took 13.92 seconds to build instance. [ 996.015519] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 996.032623] env[68233]: DEBUG nova.policy [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd88a29958ef4d92aaa7ba32a35bdf5a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '68f200493d6342139ab72e2a013e5780', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 996.093768] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.146528} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.094064] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.094874] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f743a8-3573-47a2-9fe0-f5350bfbaf57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.121703] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.129190] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c1e5c197-56a2-4f81-9bec-4663ecf9c0cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.150694] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782804, 'name': Rename_Task, 'duration_secs': 0.136548} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.151338] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.152664] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-581ccf0d-c25c-4869-ad9a-88e611edd922 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.154359] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 996.154359] env[68233]: value = "task-2782805" [ 996.154359] env[68233]: _type = "Task" [ 996.154359] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.160138] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782801, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.169074] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782805, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.170373] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 996.170373] env[68233]: value = "task-2782806" [ 996.170373] env[68233]: _type = "Task" [ 996.170373] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.181208] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782806, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.232020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83142b29-6636-48bc-9795-13a4e2be174d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.239714] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb5022d-3806-46cf-8b1e-c738f7708618 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.270642] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f5046c6-4668-480f-970d-df9baea22ae3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.282304] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db6f755-96b4-4cb8-91e0-74b19dd47215 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.298635] env[68233]: DEBUG nova.compute.provider_tree [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.336667] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.336966] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Received event network-vif-plugged-6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 996.337193] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Acquiring lock "d926386c-8543-4a6e-a782-588680cb5f34-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.337395] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Lock "d926386c-8543-4a6e-a782-588680cb5f34-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.337555] env[68233]: DEBUG oslo_concurrency.lockutils [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] Lock "d926386c-8543-4a6e-a782-588680cb5f34-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.337744] env[68233]: DEBUG nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f 
req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] No waiting events found dispatching network-vif-plugged-6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 996.337927] env[68233]: WARNING nova.compute.manager [req-16872458-6006-429d-8ed4-3a552a46350f req-3407b5f6-175a-415e-aab0-307c9703b987 service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Received unexpected event network-vif-plugged-6efe483d-3f8f-4e12-9dd1-50d94f84ce17 for instance with vm_state building and task_state spawning. [ 996.402880] env[68233]: DEBUG nova.network.neutron [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Updating instance_info_cache with network_info: [{"id": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "address": "fa:16:3e:89:8f:0b", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6efe483d-3f", "ovs_interfaceid": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.406902] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.435127] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 996.455503] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c76c4-eba3-44fe-b286-d5225e7150f8 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.429s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.632922] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Successfully created port: 4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 996.659097] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782801, 'name': CreateSnapshot_Task, 'duration_secs': 1.552627} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.659762] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 996.660196] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65da63e8-2307-4383-aefa-2c043d8e4169 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.675840] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782805, 'name': ReconfigVM_Task, 'duration_secs': 0.435827} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.678816] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46/4677d047-f8dc-4501-be9b-14e6a2222f46.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.679694] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1c0a699-0af8-4a16-a70b-664f380583a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.686795] env[68233]: DEBUG oslo_vmware.api [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782806, 'name': PowerOnVM_Task, 'duration_secs': 0.464848} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.687986] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 996.688213] env[68233]: INFO nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Took 7.05 seconds to spawn the instance on the hypervisor. [ 996.688394] env[68233]: DEBUG nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.688706] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 996.688706] env[68233]: value = "task-2782807" [ 996.688706] env[68233]: _type = "Task" [ 996.688706] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.689440] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3942267c-7e98-4bc8-8060-6800f4ff0cf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.703815] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782807, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.802711] env[68233]: DEBUG nova.scheduler.client.report [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 996.905751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.906142] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Instance network_info: |[{"id": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "address": "fa:16:3e:89:8f:0b", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6efe483d-3f", "ovs_interfaceid": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 996.906570] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:8f:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '664c466b-9417-49d7-83cc-364d964c403a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6efe483d-3f8f-4e12-9dd1-50d94f84ce17', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 996.914164] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 
tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 996.914392] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 996.914643] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1d11c27-2dc1-452c-963f-4f273be0a700 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.932634] env[68233]: DEBUG nova.compute.manager [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Received event network-changed-6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 996.932877] env[68233]: DEBUG nova.compute.manager [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Refreshing instance network info cache due to event network-changed-6efe483d-3f8f-4e12-9dd1-50d94f84ce17. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 996.933182] env[68233]: DEBUG oslo_concurrency.lockutils [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] Acquiring lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.933334] env[68233]: DEBUG oslo_concurrency.lockutils [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] Acquired lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.933527] env[68233]: DEBUG nova.network.neutron [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Refreshing network info cache for port 6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 996.936132] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 996.936132] env[68233]: value = "task-2782808" [ 996.936132] env[68233]: _type = "Task" [ 996.936132] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.950761] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782808, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.072061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.072342] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.072651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 997.072846] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.072950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.075180] env[68233]: INFO nova.compute.manager [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Terminating instance [ 997.183675] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 997.184091] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-dedd3952-a987-4859-b62c-bb9c7d572c6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.193633] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 997.193633] env[68233]: value = "task-2782809" [ 997.193633] env[68233]: _type = "Task" [ 
997.193633] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.212539] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782807, 'name': Rename_Task, 'duration_secs': 0.154061} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.212798] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782809, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.215085] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.215085] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0418e57-cf8b-4af8-a14b-95f8b4201a60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.216752] env[68233]: INFO nova.compute.manager [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Took 14.31 seconds to build instance. [ 997.222317] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 997.222317] env[68233]: value = "task-2782810" [ 997.222317] env[68233]: _type = "Task" [ 997.222317] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.230406] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782810, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.308821] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.311698] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.150s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.312035] env[68233]: DEBUG nova.objects.instance [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'resources' on Instance uuid 9c0e581d-5856-470f-a737-301649d701e5 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.336801] env[68233]: INFO nova.scheduler.client.report [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Deleted allocations for instance edf4bfac-175b-40b7-bf08-298c4735bfae [ 997.445958] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 997.451989] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782808, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.480125] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 997.480438] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 997.480617] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 997.480810] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 997.481417] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 997.481417] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 997.481417] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 997.481602] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 997.481647] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 997.481838] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 997.482034] env[68233]: DEBUG nova.virt.hardware [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 997.482927] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321aacc4-6243-4f7d-b0aa-e896c9f0011e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.493053] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92e1154-1159-4591-b0da-059266251e1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.581991] env[68233]: DEBUG nova.compute.manager [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 997.581991] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.582739] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b343970e-7232-4fcf-b6f7-c8ba79ce68e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.590316] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 997.590583] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f7db71f-2484-4dce-9cfa-a0f909f5e873 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.599339] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 997.599339] env[68233]: value = "task-2782811" [ 997.599339] env[68233]: _type = "Task" [ 997.599339] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.611260] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782811, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.710403] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782809, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.719644] env[68233]: DEBUG oslo_concurrency.lockutils [None req-06a6904d-d15e-40d1-82d9-d91cb7c9a2d6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.826s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.736893] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782810, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.785248] env[68233]: DEBUG nova.network.neutron [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Updated VIF entry in instance network info cache for port 6efe483d-3f8f-4e12-9dd1-50d94f84ce17. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 997.785658] env[68233]: DEBUG nova.network.neutron [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Updating instance_info_cache with network_info: [{"id": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "address": "fa:16:3e:89:8f:0b", "network": {"id": "46679e6d-6532-4ab3-a804-ca7ba5c5f007", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-487322929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61a69000592d412587562d2d0f890515", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "664c466b-9417-49d7-83cc-364d964c403a", "external-id": "nsx-vlan-transportzone-103", "segmentation_id": 103, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6efe483d-3f", "ovs_interfaceid": "6efe483d-3f8f-4e12-9dd1-50d94f84ce17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.850697] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8d47b14f-b9c9-40b8-979b-a96ab368b5a5 tempest-ImagesTestJSON-1014706764 tempest-ImagesTestJSON-1014706764-project-member] Lock "edf4bfac-175b-40b7-bf08-298c4735bfae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.673s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.951715] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782808, 'name': CreateVM_Task, 'duration_secs': 0.533256} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.953329] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 997.953329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.953329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 997.953466] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 997.953654] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f557a7e-3b9d-483f-b46c-5d73a4b02ebf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.959328] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 997.959328] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cc8528-e4b5-21c0-5cef-3b52b9102680" [ 997.959328] env[68233]: _type = "Task" [ 997.959328] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.967528] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cc8528-e4b5-21c0-5cef-3b52b9102680, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.084506] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1f4c1d-a205-43ac-99de-936d5b510b54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.091866] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daff9ca3-26f2-47ef-b321-1bc651c675a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.129359] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a2d9e9-41eb-42f8-8c5c-38a0945167f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.136946] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782811, 'name': PowerOffVM_Task, 'duration_secs': 0.519629} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.139186] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 998.139377] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 998.139662] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8830f36-6f74-4a57-b488-d28d859a14b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.142165] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c76e4a64-b7ff-49ce-bda4-6ad53390dcf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.157944] env[68233]: DEBUG nova.compute.provider_tree [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.208229] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782809, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.235187] env[68233]: DEBUG oslo_vmware.api [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782810, 'name': PowerOnVM_Task, 'duration_secs': 0.90242} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.235455] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 998.235651] env[68233]: DEBUG nova.compute.manager [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.236439] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270f3568-6613-4060-818b-b34401ebc382 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.281246] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 998.281425] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 998.281572] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 56fb49f0-4b2b-4501-8ded-34dff1278a0c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 998.281864] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06346905-09d6-4b77-8571-72ebf93ab0b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.288319] env[68233]: DEBUG oslo_concurrency.lockutils [req-998870eb-715e-4a97-b589-906e4a29e379 req-a57d11d7-df34-40fc-9b9e-e32551b6774a service nova] Releasing lock "refresh_cache-d926386c-8543-4a6e-a782-588680cb5f34" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.288790] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 998.288790] env[68233]: value = "task-2782813" [ 998.288790] env[68233]: _type = "Task" [ 998.288790] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.300148] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782813, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.353229] env[68233]: INFO nova.compute.manager [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Rebuilding instance [ 998.404130] env[68233]: DEBUG nova.compute.manager [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 998.405013] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff79224-9f5a-494a-b69f-4518ec0ba330 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.420314] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Successfully updated port: 4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 998.470101] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cc8528-e4b5-21c0-5cef-3b52b9102680, 'name': SearchDatastore_Task, 'duration_secs': 0.015958} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.470417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 998.470656] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 998.470894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.471058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.471245] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 998.471507] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-315d8fa4-a775-4ace-a52e-fa4646568422 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.479510] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 998.479742] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 998.480566] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c93eb5c-babc-4d30-baaf-2412df314222 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.487474] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 998.487474] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc777a-2490-4106-9198-5884956380fe" [ 998.487474] env[68233]: _type = "Task" [ 998.487474] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.495239] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc777a-2490-4106-9198-5884956380fe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.661052] env[68233]: DEBUG nova.scheduler.client.report [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 998.707668] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782809, 'name': CloneVM_Task, 'duration_secs': 1.213565} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.707923] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Created linked-clone VM from snapshot [ 998.708650] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0c7c24-5025-4e57-a729-7f8e45ed9b20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.716358] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Uploading image 35b86049-24a9-40ef-b027-4a2c30fd3821 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 998.738396] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 998.738396] env[68233]: value = "vm-559470" [ 998.738396] env[68233]: _type = "VirtualMachine" [ 998.738396] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 998.738983] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-aeecec03-6c83-4d61-9140-e852405c12b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.750811] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lease: (returnval){ [ 998.750811] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3d90b-700e-2685-ce97-f5a1e4e5b09a" [ 998.750811] env[68233]: _type = "HttpNfcLease" [ 998.750811] env[68233]: } obtained for exporting VM: (result){ [ 998.750811] env[68233]: value = "vm-559470" [ 998.750811] env[68233]: _type = "VirtualMachine" [ 998.750811] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 998.755323] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the lease: (returnval){ [ 998.755323] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3d90b-700e-2685-ce97-f5a1e4e5b09a" [ 998.755323] env[68233]: _type = "HttpNfcLease" [ 998.755323] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 998.760320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.764030] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 998.764030] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3d90b-700e-2685-ce97-f5a1e4e5b09a" [ 998.764030] env[68233]: _type = "HttpNfcLease" [ 998.764030] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 998.797893] env[68233]: DEBUG oslo_vmware.api [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782813, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371301} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.798511] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.798723] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.798976] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.799184] env[68233]: INFO nova.compute.manager [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Took 1.22 seconds to destroy the instance on the hypervisor. [ 998.799429] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.799623] env[68233]: DEBUG nova.compute.manager [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.799721] env[68233]: DEBUG nova.network.neutron [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.922082] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.922228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquired lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.922371] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.968636] env[68233]: DEBUG nova.compute.manager [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Received event network-vif-plugged-4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 998.968636] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Acquiring lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.968948] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.968948] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.969762] env[68233]: DEBUG nova.compute.manager [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] No waiting events found dispatching 
network-vif-plugged-4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 998.969762] env[68233]: WARNING nova.compute.manager [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Received unexpected event network-vif-plugged-4b269e98-c864-447a-8970-f8d1f503b4d8 for instance with vm_state building and task_state spawning. [ 998.970103] env[68233]: DEBUG nova.compute.manager [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Received event network-changed-4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 998.970330] env[68233]: DEBUG nova.compute.manager [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Refreshing instance network info cache due to event network-changed-4b269e98-c864-447a-8970-f8d1f503b4d8. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 998.970556] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Acquiring lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.005020] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bc777a-2490-4106-9198-5884956380fe, 'name': SearchDatastore_Task, 'duration_secs': 0.014641} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.005020] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c0322e2-e1c1-481b-81e9-6f68145937d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.010681] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 999.010681] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6a661-756d-02b5-d0a8-a7883be85222" [ 999.010681] env[68233]: _type = "Task" [ 999.010681] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.020524] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6a661-756d-02b5-d0a8-a7883be85222, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.165673] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.854s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.173114] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.611s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.175334] env[68233]: INFO nova.compute.claims [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.198395] env[68233]: INFO nova.scheduler.client.report [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocations for instance 9c0e581d-5856-470f-a737-301649d701e5 [ 999.267562] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 999.267562] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3d90b-700e-2685-ce97-f5a1e4e5b09a" [ 999.267562] env[68233]: _type = "HttpNfcLease" [ 999.267562] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 999.267562] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 999.267562] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c3d90b-700e-2685-ce97-f5a1e4e5b09a" [ 999.267562] env[68233]: _type = "HttpNfcLease" [ 999.267562] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 999.267980] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8180fa4a-d252-47fb-93f7-f3239ae79e1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.275634] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 999.277974] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk for reading. 
{{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 999.383466] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-560911a1-4e22-463c-afdf-a96e3459177f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.422115] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 999.422472] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e0f893-0d34-4a01-8b78-b24477a1ad1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.429573] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 999.429573] env[68233]: value = "task-2782815" [ 999.429573] env[68233]: _type = "Task" [ 999.429573] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.440750] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782815, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.469183] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 999.524253] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6a661-756d-02b5-d0a8-a7883be85222, 'name': SearchDatastore_Task, 'duration_secs': 0.035549} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.524253] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.524253] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d926386c-8543-4a6e-a782-588680cb5f34/d926386c-8543-4a6e-a782-588680cb5f34.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 999.524814] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9b90f48-0238-48e7-b9a7-95cd84672023 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.530698] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 999.530698] env[68233]: value = "task-2782816" [ 999.530698] env[68233]: _type = "Task" [ 999.530698] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.540919] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782816, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.634632] env[68233]: DEBUG nova.network.neutron [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.673312] env[68233]: DEBUG nova.network.neutron [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Updating instance_info_cache with network_info: [{"id": "4b269e98-c864-447a-8970-f8d1f503b4d8", "address": "fa:16:3e:5f:69:70", "network": {"id": "9fdc750a-f078-426c-88d4-24882871ed53", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-777503218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f200493d6342139ab72e2a013e5780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b269e98-c8", "ovs_interfaceid": "4b269e98-c864-447a-8970-f8d1f503b4d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.711893] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8cc854-fc26-462a-ae5b-b5d02a04115d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "9c0e581d-5856-470f-a737-301649d701e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.902s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.943291] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782815, 'name': PowerOffVM_Task, 'duration_secs': 0.309492} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.944126] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 999.945773] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 999.945773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5c79cf-fc9b-4fde-b23f-eade78bc0df0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.954608] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 999.954972] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21219629-b793-4cc7-b97a-faffbb70eccd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.044442] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782816, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.046141] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1000.046473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1000.047386] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1000.047386] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-299c5149-bc11-45b8-8e7b-63ae04da1fcf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.054217] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1000.054217] env[68233]: value = "task-2782818" [ 1000.054217] env[68233]: _type = "Task" [ 1000.054217] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.064406] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782818, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.140025] env[68233]: INFO nova.compute.manager [-] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Took 1.34 seconds to deallocate network for instance. 
[ 1000.178675] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Releasing lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.179084] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Instance network_info: |[{"id": "4b269e98-c864-447a-8970-f8d1f503b4d8", "address": "fa:16:3e:5f:69:70", "network": {"id": "9fdc750a-f078-426c-88d4-24882871ed53", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-777503218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f200493d6342139ab72e2a013e5780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b269e98-c8", "ovs_interfaceid": "4b269e98-c864-447a-8970-f8d1f503b4d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1000.179459] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Acquired lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.179820] env[68233]: DEBUG nova.network.neutron [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Refreshing network info cache for port 4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1000.181551] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5f:69:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b9aabc7c-0f6c-42eb-bd27-493a1496c0c8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4b269e98-c864-447a-8970-f8d1f503b4d8', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1000.190248] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 
tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Creating folder: Project (68f200493d6342139ab72e2a013e5780). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1000.194564] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a248e14c-973a-49b6-a806-848e47b8ddf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.206982] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Created folder: Project (68f200493d6342139ab72e2a013e5780) in parent group-v559223. [ 1000.209076] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Creating folder: Instances. Parent ref: group-v559471. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1000.209076] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-03606334-44ee-48a0-8c09-814522b13ca5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.219113] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Created folder: Instances in parent group-v559471. [ 1000.219339] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1000.219822] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1000.220258] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74c5e4b8-90f0-44e1-8b52-9ff2b26ea4f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.246814] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1000.246814] env[68233]: value = "task-2782821" [ 1000.246814] env[68233]: _type = "Task" [ 1000.246814] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.258703] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782821, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.523521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b54806b-f58a-4d6b-8e23-1f99b7159ba4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.539136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba252e23-1bb1-426e-898d-c0c957c6b34f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.549586] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782816, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615104} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.582423] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] d926386c-8543-4a6e-a782-588680cb5f34/d926386c-8543-4a6e-a782-588680cb5f34.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1000.582738] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1000.583444] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9a674e7f-36bc-4c0e-956d-5e5037b7ffde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.590924] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4ecf07-fc5a-40ad-9c5b-09fc2c4bfd1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.599950] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782818, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243165} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.604873] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1000.604873] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1000.604873] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1000.611052] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1000.611052] env[68233]: value = "task-2782822" [ 1000.611052] env[68233]: _type = "Task" [ 1000.611052] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.611052] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fca72c1-c0d4-4ec2-8054-fd6788bded8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.633405] env[68233]: DEBUG nova.compute.provider_tree [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.640306] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782822, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.645699] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.757903] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782821, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.993814] env[68233]: DEBUG nova.compute.manager [req-df69390d-fe05-4ff5-be18-fa67d449d5ca req-de6b2db8-ad41-4c59-89aa-73f5f8575578 service nova] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Received event network-vif-deleted-5d6c2240-95b2-4748-bb07-13b4092e246a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1000.995393] env[68233]: DEBUG nova.network.neutron [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Updated VIF entry in instance network info cache for port 4b269e98-c864-447a-8970-f8d1f503b4d8. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.995785] env[68233]: DEBUG nova.network.neutron [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Updating instance_info_cache with network_info: [{"id": "4b269e98-c864-447a-8970-f8d1f503b4d8", "address": "fa:16:3e:5f:69:70", "network": {"id": "9fdc750a-f078-426c-88d4-24882871ed53", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-777503218-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "68f200493d6342139ab72e2a013e5780", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b9aabc7c-0f6c-42eb-bd27-493a1496c0c8", "external-id": "nsx-vlan-transportzone-368", "segmentation_id": 368, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4b269e98-c8", "ovs_interfaceid": "4b269e98-c864-447a-8970-f8d1f503b4d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1001.125410] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132669} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.125410] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1001.126279] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21402496-8f72-4b1e-a171-bf0575f0066e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.141642] env[68233]: DEBUG nova.scheduler.client.report [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.155319] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] d926386c-8543-4a6e-a782-588680cb5f34/d926386c-8543-4a6e-a782-588680cb5f34.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1001.157902] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-330f30db-c55a-4cec-a3d4-b443698a7920 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.185895] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1001.185895] env[68233]: value = "task-2782823" [ 1001.185895] env[68233]: _type = "Task" [ 1001.185895] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.198879] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782823, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.261331] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782821, 'name': CreateVM_Task, 'duration_secs': 0.603563} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.261331] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1001.261331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.261331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.261331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1001.261331] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7f208de-6b35-4ee3-9b4f-38e479bfcd74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.268811] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1001.268811] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bf1b2d-0d1d-2cf7-6fcd-62a74bf1208c" [ 1001.268811] env[68233]: _type = "Task" [ 1001.268811] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.281615] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bf1b2d-0d1d-2cf7-6fcd-62a74bf1208c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.502019] env[68233]: DEBUG oslo_concurrency.lockutils [req-7b6bbf71-8183-4266-89a7-023f5e32869c req-5b2aee5f-c7a5-4167-b2e8-e1951aa8d8b0 service nova] Releasing lock "refresh_cache-a6b913f8-8ce5-4227-b36c-bc191d2e7907" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.643776] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1001.644313] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1001.644516] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1001.644707] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1001.644861] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1001.645128] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1001.645416] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1001.645624] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1001.645847] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1001.646126] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1001.646311] env[68233]: DEBUG nova.virt.hardware [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1001.647228] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7473e5f5-9d61-480f-a98d-1765ca202642 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.656603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290a8144-27df-4460-9e32-f3f63d428bbd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.662343] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.662924] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1001.665925] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.835s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1001.666193] env[68233]: DEBUG nova.objects.instance [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'resources' on Instance uuid dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1001.682030] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:c7:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4836d496-c0c3-42a6-8b3c-e86a6660174e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.689610] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.690943] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.694449] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ba04d3e-d057-46c6-94a3-91b2c1995af9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.722530] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782823, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.723665] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.723665] env[68233]: value = "task-2782824" [ 1001.723665] env[68233]: _type = "Task" [ 1001.723665] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.732140] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782824, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.782871] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bf1b2d-0d1d-2cf7-6fcd-62a74bf1208c, 'name': SearchDatastore_Task, 'duration_secs': 0.025823} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.782871] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.782871] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.782871] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.782871] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.782871] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.782871] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-05147322-ff8c-4042-8729-3977bc47fb3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.792354] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.792354] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 
tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.792354] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5806e019-44cb-49f4-87c1-960a90287655 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.798587] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1001.798587] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9fefa-3ad8-fdd3-034b-8545092ce053" [ 1001.798587] env[68233]: _type = "Task" [ 1001.798587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.808780] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9fefa-3ad8-fdd3-034b-8545092ce053, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.985985] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.985985] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.175517] env[68233]: DEBUG nova.compute.utils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.178044] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1002.178767] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1002.190788] env[68233]: DEBUG nova.objects.instance [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'numa_topology' on Instance uuid dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1002.207252] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782823, 'name': ReconfigVM_Task, 'duration_secs': 0.695699} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.208307] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Reconfigured VM instance instance-0000005c to attach disk [datastore2] d926386c-8543-4a6e-a782-588680cb5f34/d926386c-8543-4a6e-a782-588680cb5f34.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1002.209108] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-715724b9-fb9e-4ad9-9849-1a079237122a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.219276] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1002.219276] env[68233]: value = "task-2782825" [ 1002.219276] env[68233]: _type = "Task" [ 1002.219276] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.233635] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782825, 'name': Rename_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.237968] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782824, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.260780] env[68233]: DEBUG nova.policy [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1002.433283] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d9fefa-3ad8-fdd3-034b-8545092ce053, 'name': SearchDatastore_Task, 'duration_secs': 0.016297} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.433283] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ff411ab-810c-453b-9371-70b7e6230761 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.433283] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1002.433283] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b099-e122-4be8-96d1-d7ae083d48ef" [ 1002.433283] env[68233]: _type = "Task" [ 1002.433283] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.433283] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b099-e122-4be8-96d1-d7ae083d48ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.489106] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.678334] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1002.694724] env[68233]: DEBUG nova.objects.base [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1002.737348] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782825, 'name': Rename_Task, 'duration_secs': 0.161134} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.738472] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1002.738472] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4071afab-63c2-44a8-b811-91f0857853d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.743492] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782824, 'name': CreateVM_Task, 'duration_secs': 0.530905} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.744060] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.744765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.745404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.745404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1002.745580] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b47089e6-e8c8-45cd-9015-d8f51cbafea5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.752577] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 
tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1002.752577] env[68233]: value = "task-2782826" [ 1002.752577] env[68233]: _type = "Task" [ 1002.752577] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.752956] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1002.752956] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256217b-e635-b7bb-f03a-092b27566843" [ 1002.752956] env[68233]: _type = "Task" [ 1002.752956] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.764122] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782826, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.771085] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256217b-e635-b7bb-f03a-092b27566843, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.839278] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b6b099-e122-4be8-96d1-d7ae083d48ef, 'name': SearchDatastore_Task, 'duration_secs': 0.012523} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.839558] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.839815] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a6b913f8-8ce5-4227-b36c-bc191d2e7907/a6b913f8-8ce5-4227-b36c-bc191d2e7907.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1002.840097] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cecb1c56-9932-4ba4-855f-b74eea081839 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.851711] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1002.851711] env[68233]: value = "task-2782827" [ 1002.851711] env[68233]: _type = "Task" [ 1002.851711] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.860288] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782827, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.011285] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.046773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4842a6cf-1e57-4d39-ad74-98833d6b8489 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.055707] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cc6f59-47fa-439a-83e3-3ca6b17c7e19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.094466] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85dd9c76-b484-4097-9c1a-c743899587fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.102522] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a601855b-f431-4795-a695-08dd53e06a93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.119251] env[68233]: DEBUG nova.compute.provider_tree [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.275325] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782826, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.283651] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5256217b-e635-b7bb-f03a-092b27566843, 'name': SearchDatastore_Task, 'duration_secs': 0.024841} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.284293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.284692] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.287595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.287595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.287595] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.287595] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8352aac-edbe-4717-b56e-3b263a022b5e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.302255] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.302504] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.303569] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca9a62b-f1c5-40fe-9169-29285119722d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.309961] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1003.309961] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7fef1-3bca-1669-d255-230ff4acbed1" [ 1003.309961] env[68233]: _type = "Task" [ 1003.309961] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.318807] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7fef1-3bca-1669-d255-230ff4acbed1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.362319] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782827, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.388750] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Successfully created port: 7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.624103] env[68233]: DEBUG nova.scheduler.client.report [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1003.692579] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1003.723094] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1003.723405] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1003.723572] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1003.723815] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1003.723954] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1003.724150] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1003.724396] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1003.724564] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1003.724738] 
env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1003.724903] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1003.725091] env[68233]: DEBUG nova.virt.hardware [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1003.726053] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce9d57b-923d-48de-9670-05d1570e69d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.737888] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbfe7e7-f639-401c-810d-a86b3b024855 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.761167] env[68233]: DEBUG oslo_vmware.api [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782826, 'name': PowerOnVM_Task, 'duration_secs': 0.550362} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.761495] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1003.761750] env[68233]: INFO nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Took 9.74 seconds to spawn the instance on the hypervisor. 
[ 1003.761949] env[68233]: DEBUG nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.762835] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6054820-f7fe-457c-9147-308c55174b5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.822752] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a7fef1-3bca-1669-d255-230ff4acbed1, 'name': SearchDatastore_Task, 'duration_secs': 0.058023} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.826616] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80a47988-48d9-4994-9d80-4a5558fcfa9a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.832913] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1003.832913] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52242e2a-75b9-829e-e43e-69ad276573f8" [ 1003.832913] env[68233]: _type = "Task" [ 1003.832913] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.842843] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52242e2a-75b9-829e-e43e-69ad276573f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.864112] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782827, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.691491} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.864399] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] a6b913f8-8ce5-4227-b36c-bc191d2e7907/a6b913f8-8ce5-4227-b36c-bc191d2e7907.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1003.864617] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1003.865143] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e036da28-b8e5-4d23-8ba5-0668cadef5b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.871948] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1003.871948] env[68233]: value = "task-2782828" [ 1003.871948] env[68233]: _type = "Task" [ 1003.871948] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.883465] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782828, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.087912] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.088054] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.129741] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.464s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.134497] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.374s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.134849] env[68233]: DEBUG nova.objects.instance [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1004.285241] env[68233]: INFO nova.compute.manager [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Took 16.31 seconds to build instance. [ 1004.343586] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52242e2a-75b9-829e-e43e-69ad276573f8, 'name': SearchDatastore_Task, 'duration_secs': 0.024617} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.345109] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.345109] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1004.345109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e41675e3-696d-4497-b614-643c56a8af15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.351570] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1004.351570] env[68233]: value = "task-2782829" [ 1004.351570] env[68233]: _type = "Task" [ 1004.351570] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.360327] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.381054] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782828, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182843} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.381330] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1004.382218] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f161824a-752c-4cfd-a0e3-56d064b88bfe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.404305] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] a6b913f8-8ce5-4227-b36c-bc191d2e7907/a6b913f8-8ce5-4227-b36c-bc191d2e7907.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1004.404616] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef5760e9-507a-411c-85c3-520152016e86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.424265] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1004.424265] env[68233]: value = "task-2782830" [ 1004.424265] env[68233]: _type = "Task" [ 1004.424265] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.432954] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.595289] env[68233]: INFO nova.compute.manager [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Detaching volume 966710f3-d7e3-4a95-bd4c-e592a39ff63d [ 1004.641644] env[68233]: INFO nova.virt.block_device [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Attempting to driver detach volume 966710f3-d7e3-4a95-bd4c-e592a39ff63d from mountpoint /dev/sdb [ 1004.641928] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1004.642144] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559444', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'name': 'volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '13972b73-8bae-4a2a-a987-b6177381e7c8', 'attached_at': '', 'detached_at': '', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'serial': '966710f3-d7e3-4a95-bd4c-e592a39ff63d'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1004.642663] env[68233]: DEBUG oslo_concurrency.lockutils [None req-71dfa7e8-76c4-41ac-b8e3-ae91d2929874 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.444s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.644416] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8ffe14-0d8f-4586-b80c-5cdccd6b24d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.648573] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 8.242s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.648749] env[68233]: INFO nova.compute.manager [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Unshelving [ 1004.678163] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f65ecf-b454-49a5-890d-cb423f191b63 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.686424] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6d1ab2-ad6c-4009-8202-65b03342727d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.709512] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ae9304-25f7-4804-9479-0b22deb6a4bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.724824] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] The volume has not been displaced from its original 
location: [datastore2] volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d/volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1004.732032] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1004.732032] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5bd64de8-fb0c-4cbc-b139-ad15fa2edf3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.751610] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1004.751610] env[68233]: value = "task-2782831" [ 1004.751610] env[68233]: _type = "Task" [ 1004.751610] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.762533] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782831, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.785030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.785294] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.787088] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2612f3c-2b0a-4c89-a01f-d8f6453c9e17 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.820s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.821922] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.822185] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.863374] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782829, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.870061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "619230c4-f642-4835-8c5a-84ece6610e0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.870316] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.934674] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782830, 'name': ReconfigVM_Task, 'duration_secs': 0.336731} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.934985] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Reconfigured VM instance instance-0000005d to attach disk [datastore2] a6b913f8-8ce5-4227-b36c-bc191d2e7907/a6b913f8-8ce5-4227-b36c-bc191d2e7907.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1004.935698] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4cac45f0-960f-458a-a4e4-48ffac82b7c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.943066] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1004.943066] env[68233]: value = "task-2782832" [ 1004.943066] env[68233]: _type = "Task" [ 1004.943066] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.952765] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782832, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.033846] env[68233]: DEBUG nova.compute.manager [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-plugged-7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1005.034275] env[68233]: DEBUG oslo_concurrency.lockutils [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.034423] env[68233]: DEBUG oslo_concurrency.lockutils [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.034622] env[68233]: DEBUG oslo_concurrency.lockutils [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.034821] env[68233]: DEBUG nova.compute.manager [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] No 
waiting events found dispatching network-vif-plugged-7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1005.035077] env[68233]: WARNING nova.compute.manager [req-51b100cd-ab94-463d-a6d8-341da4d995f4 req-b1627995-6871-47fd-917b-c937343a2806 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received unexpected event network-vif-plugged-7d72ccc2-ee10-4121-9a73-41bc93e7493e for instance with vm_state building and task_state spawning. [ 1005.059339] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.059666] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.059903] env[68233]: INFO nova.compute.manager [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Shelving [ 1005.159868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-decfbd3c-3c15-4c4b-9aa7-48a4a2856559 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.164333] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.519s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.164333] env[68233]: DEBUG nova.objects.instance [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'resources' on Instance uuid 56fb49f0-4b2b-4501-8ded-34dff1278a0c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.167083] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Successfully updated port: 7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.261766] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782831, 'name': ReconfigVM_Task, 'duration_secs': 
0.502984} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.262089] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1005.266839] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32e7f92d-3920-483d-abda-1f671a9902d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.281941] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1005.281941] env[68233]: value = "task-2782833" [ 1005.281941] env[68233]: _type = "Task" [ 1005.281941] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.290788] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.293596] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.326916] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.363219] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695934} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.363367] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1005.363596] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1005.363848] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d651f0e-b6c3-428f-a0fc-db65b91f55bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.369961] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1005.369961] env[68233]: value = "task-2782834" [ 1005.369961] env[68233]: _type = "Task" [ 1005.369961] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.378879] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.381742] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782834, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.453187] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782832, 'name': Rename_Task, 'duration_secs': 0.40158} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.453551] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1005.454427] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14683103-48de-4a58-8cc5-cd0b3af30387 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.460505] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1005.460505] env[68233]: value = "task-2782835" [ 1005.460505] env[68233]: _type = "Task" [ 1005.460505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.468376] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.675672] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.675910] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.676179] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1005.685476] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.794322] env[68233]: DEBUG oslo_vmware.api [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782833, 'name': ReconfigVM_Task, 'duration_secs': 0.177439} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.796988] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559444', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'name': 'volume-966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '13972b73-8bae-4a2a-a987-b6177381e7c8', 'attached_at': '', 'detached_at': '', 'volume_id': '966710f3-d7e3-4a95-bd4c-e592a39ff63d', 'serial': '966710f3-d7e3-4a95-bd4c-e592a39ff63d'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1005.814870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.848844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.881452] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121609} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.881766] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.886022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcb25bc-d5c1-47d1-a951-db9996af86cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.912990] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Reconfiguring VM instance instance-0000005b to attach disk [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.916103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.916414] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af348bf2-ef0e-4354-a5d6-bc55629d1c22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.938305] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1005.938305] env[68233]: value = "task-2782836" [ 1005.938305] env[68233]: _type = "Task" [ 1005.938305] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.950752] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782836, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.971498] env[68233]: DEBUG oslo_vmware.api [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782835, 'name': PowerOnVM_Task, 'duration_secs': 0.490721} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.975178] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.975530] env[68233]: INFO nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1005.975802] env[68233]: DEBUG nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.977138] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e926a4-9fc6-4269-8c53-c28ab39f2029 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.021244] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f9f2c7-1efc-4c49-844d-c22707b65e24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.032066] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21034bdc-64ae-4592-be29-4b0889e4b6c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.066426] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64224632-7b1f-4d72-a292-eed9c7bb3705 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.069637] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.069961] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eca33d3f-493c-4a22-8d12-5a7dece5df8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.076567] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3490692-9ea3-4031-9ee0-fc8cdc7233e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.082720] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1006.082720] env[68233]: value = "task-2782837" [ 1006.082720] env[68233]: _type = "Task" [ 1006.082720] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.095226] env[68233]: DEBUG nova.compute.provider_tree [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1006.102014] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782837, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.232362] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.374949] env[68233]: DEBUG nova.objects.instance [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'flavor' on Instance uuid 13972b73-8bae-4a2a-a987-b6177381e7c8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.393264] env[68233]: DEBUG nova.network.neutron [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.448182] env[68233]: 
DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782836, 'name': ReconfigVM_Task, 'duration_secs': 0.462684} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.448477] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Reconfigured VM instance instance-0000005b to attach disk [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9/85313d15-04da-4f24-b203-bed5ebcbe1a9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1006.449167] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eed9f550-db81-4b52-b97d-5b129b624294 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.456330] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1006.456330] env[68233]: value = "task-2782838" [ 1006.456330] env[68233]: _type = "Task" [ 1006.456330] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.466440] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782838, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.495748] env[68233]: INFO nova.compute.manager [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Took 17.89 seconds to build instance. [ 1006.595719] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782837, 'name': PowerOffVM_Task, 'duration_secs': 0.30418} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.595719] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1006.596312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b103c4-818e-49ee-8365-1c5c4fd6fd81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.617703] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42f99fe-f4bc-4321-87b1-0f96d79204a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.621394] env[68233]: ERROR nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [req-bd9b7a81-fb6f-44c6-a30b-d6fbd5a52555] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bd9b7a81-fb6f-44c6-a30b-d6fbd5a52555"}]} [ 1006.638070] env[68233]: DEBUG nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1006.655977] env[68233]: DEBUG nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1006.656349] env[68233]: DEBUG nova.compute.provider_tree [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1006.668635] env[68233]: DEBUG nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1006.689135] env[68233]: DEBUG nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1006.896055] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.896055] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Instance network_info: |[{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1006.896425] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:b3:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d72ccc2-ee10-4121-9a73-41bc93e7493e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1006.904185] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.907515] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1006.907965] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83fd739c-c643-4d75-810f-2ec6410141a1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.931105] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1006.931105] env[68233]: value = "task-2782839" [ 1006.931105] env[68233]: _type = "Task" [ 1006.931105] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.942559] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782839, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.965531] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782838, 'name': Rename_Task, 'duration_secs': 0.226206} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.968372] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.968847] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52bbf399-eb92-40ce-93ed-1ccfaf7b47a1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.974824] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1006.974824] env[68233]: value = "task-2782840" [ 1006.974824] env[68233]: _type = "Task" [ 1006.974824] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.984531] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782840, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.992856] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c32ef2b-0e93-4988-9339-c7925c903bab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.997429] env[68233]: DEBUG oslo_concurrency.lockutils [None req-362ede1a-c404-49b7-9f0b-da4e80ade27d tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.405s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.000541] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ada9ce7-a49d-4c98-9ce1-744c04ea3f95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.034955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "d926386c-8543-4a6e-a782-588680cb5f34" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.035263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.035491] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "d926386c-8543-4a6e-a782-588680cb5f34-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.035685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.035864] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.038269] env[68233]: INFO nova.compute.manager [None 
req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Terminating instance [ 1007.040075] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b6a1bc-2500-408f-9746-c67cee776e0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.050901] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fec1531-7ab1-400f-af60-399d6efffa5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.069767] env[68233]: DEBUG nova.compute.provider_tree [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.074020] env[68233]: DEBUG nova.compute.manager [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-changed-7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1007.074020] env[68233]: DEBUG nova.compute.manager [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing instance network info cache due to event network-changed-7d72ccc2-ee10-4121-9a73-41bc93e7493e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1007.074020] env[68233]: DEBUG oslo_concurrency.lockutils [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.074020] env[68233]: DEBUG oslo_concurrency.lockutils [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.074020] env[68233]: DEBUG nova.network.neutron [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing network info cache for port 7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1007.132764] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1007.132878] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3892e38b-80a4-417c-9b2e-03003af8d97b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.141206] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1007.141206] env[68233]: value = "task-2782841" [ 1007.141206] env[68233]: _type = "Task" [ 1007.141206] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.149626] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782841, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.332869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.333177] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.333394] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.333580] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.333756] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.335905] env[68233]: INFO nova.compute.manager [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Terminating instance [ 1007.384821] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d8255a72-237b-4d2d-a62d-c1955f4ed70c tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.297s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.441307] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782839, 'name': CreateVM_Task, 'duration_secs': 0.435855} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.441495] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1007.442267] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.442444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.442765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1007.443101] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e81e938e-62ca-4a0c-86da-ecf12b734b21 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.447891] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1007.447891] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52102c44-3239-e5c6-b85b-bc4981d38d52" [ 1007.447891] env[68233]: _type = "Task" [ 1007.447891] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.457977] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52102c44-3239-e5c6-b85b-bc4981d38d52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.484752] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782840, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.545738] env[68233]: DEBUG nova.compute.manager [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.545973] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.546912] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7826ddf2-0a75-42c5-a459-119c0a7f4d57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.554813] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.555109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71a63d1a-e877-46d1-a3cc-22fc4d7cd983 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.560771] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1007.560771] env[68233]: value = "task-2782842" [ 1007.560771] env[68233]: _type = "Task" [ 1007.560771] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.568445] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782842, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.608627] env[68233]: DEBUG nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1007.608869] env[68233]: DEBUG nova.compute.provider_tree [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 118 to 119 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1007.609068] env[68233]: DEBUG nova.compute.provider_tree [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1007.654510] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782841, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.728901] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.729269] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.729499] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.729849] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.730112] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.732550] env[68233]: INFO nova.compute.manager [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Terminating instance [ 1007.833140] env[68233]: DEBUG nova.network.neutron [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updated VIF entry in instance network info cache for port 7d72ccc2-ee10-4121-9a73-41bc93e7493e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1007.833428] env[68233]: DEBUG nova.network.neutron [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.839794] env[68233]: DEBUG nova.compute.manager [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1007.839911] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1007.841027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965dbb5-e814-4f27-b4ce-58bce166530a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.850867] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.851189] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33812016-50a8-4a99-b7ae-927ebdf4cd1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.859182] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1007.859182] env[68233]: value = "task-2782843" [ 1007.859182] env[68233]: _type = "Task" [ 1007.859182] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.868200] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.959268] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52102c44-3239-e5c6-b85b-bc4981d38d52, 'name': SearchDatastore_Task, 'duration_secs': 0.014251} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.959427] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.959788] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1007.960068] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.960254] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1007.960459] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1007.960789] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46b0afab-fdc5-43ba-ae9f-db1f5d87266d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.970714] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1007.970909] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1007.971693] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acf69a56-cfe7-421e-8ff8-3e6ed9de1306 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.981042] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1007.981042] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ac07a7-7057-f59d-9b23-05601b5fd949" [ 1007.981042] env[68233]: _type = "Task" [ 1007.981042] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.987810] env[68233]: DEBUG oslo_vmware.api [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782840, 'name': PowerOnVM_Task, 'duration_secs': 0.754467} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.989384] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.989662] env[68233]: DEBUG nova.compute.manager [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1007.994068] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caaa93c8-7e25-407f-a4c6-5ce17235355d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.997028] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ac07a7-7057-f59d-9b23-05601b5fd949, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.071058] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782842, 'name': PowerOffVM_Task, 'duration_secs': 0.48922} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.071360] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.071534] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.071852] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62c299fc-b481-4162-a3b7-3ebdc2a621ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.114510] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.951s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.117032] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.106s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.119101] env[68233]: INFO nova.compute.claims [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1008.140161] env[68233]: INFO nova.scheduler.client.report [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 56fb49f0-4b2b-4501-8ded-34dff1278a0c [ 1008.142313] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.142558] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.142857] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleting the datastore file [datastore2] 
d926386c-8543-4a6e-a782-588680cb5f34 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.145929] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6b3f900-9e6b-47c4-93b9-3a0f8a46a8ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.156830] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782841, 'name': CreateSnapshot_Task, 'duration_secs': 0.942124} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.158335] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1008.158441] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1008.158441] env[68233]: value = "task-2782845" [ 1008.158441] env[68233]: _type = "Task" [ 1008.158441] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.159215] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cd335a-10d8-438e-8dcf-8a4c5b300633 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.179799] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782845, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.237061] env[68233]: DEBUG nova.compute.manager [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1008.237061] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.237741] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cc2263-e8ed-47b6-8fee-92eb7fec813d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.245672] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.245879] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a64c8b59-a4b5-49e1-917e-1843d231bd94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.251834] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1008.251834] env[68233]: value = "task-2782846" [ 1008.251834] env[68233]: _type = "Task" [ 1008.251834] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.259515] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782846, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.337079] env[68233]: DEBUG oslo_concurrency.lockutils [req-59de3047-7507-4a91-924d-ecb17a0c541f req-720b42a2-ed44-4b90-9eff-a8b9fa0442df service nova] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.370079] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782843, 'name': PowerOffVM_Task, 'duration_secs': 0.217608} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.370379] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.370551] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.370812] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4d11f46-2838-46dd-933a-8b0fddc06274 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.440222] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.440541] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.440665] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Deleting the datastore file [datastore2] a6b913f8-8ce5-4227-b36c-bc191d2e7907 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.440939] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b3ce920-a5bf-4de1-9002-34ec0981bd36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.447693] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for the task: (returnval){ [ 1008.447693] env[68233]: value = "task-2782848" [ 1008.447693] env[68233]: _type = "Task" [ 1008.447693] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.456255] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.491591] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ac07a7-7057-f59d-9b23-05601b5fd949, 'name': SearchDatastore_Task, 'duration_secs': 0.014617} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.492460] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1808e76-dc7c-425e-9ee4-cbb8f4b4e1f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.497992] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1008.497992] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529b2ef6-daf7-be66-40aa-6f11e6f21537" [ 1008.497992] env[68233]: _type = "Task" [ 1008.497992] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.509875] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529b2ef6-daf7-be66-40aa-6f11e6f21537, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.515105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.652028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4cadaddd-d69e-4c5b-b2ed-b5a996f59d97 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "56fb49f0-4b2b-4501-8ded-34dff1278a0c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.579s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.671944] env[68233]: DEBUG oslo_vmware.api [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782845, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256604} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.672241] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.672434] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.672611] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.672853] env[68233]: INFO nova.compute.manager [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1008.673082] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.673278] env[68233]: DEBUG nova.compute.manager [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1008.673375] env[68233]: DEBUG nova.network.neutron [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.687466] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1008.687809] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3e28b54b-95bd-4a25-8f1d-fc1445992bce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.695914] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1008.695914] env[68233]: value = "task-2782849" [ 1008.695914] env[68233]: _type = "Task" [ 1008.695914] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.704029] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.761080] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782846, 'name': PowerOffVM_Task, 'duration_secs': 0.351336} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.762734] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.762734] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1008.762734] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48a7dfc9-d0b9-4a0b-8bbb-e23b50ac0bca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.831132] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1008.831132] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1008.831132] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleting the datastore file [datastore1] 13972b73-8bae-4a2a-a987-b6177381e7c8 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1008.831132] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afa63223-1f2a-47d2-8164-26801a0ab15c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.838051] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: 
(returnval){ [ 1008.838051] env[68233]: value = "task-2782851" [ 1008.838051] env[68233]: _type = "Task" [ 1008.838051] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.849024] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.963047] env[68233]: DEBUG oslo_vmware.api [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Task: {'id': task-2782848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275229} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.963047] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.963047] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.963047] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.963047] env[68233]: INFO nova.compute.manager [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1008.963047] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.963047] env[68233]: DEBUG nova.compute.manager [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1008.963047] env[68233]: DEBUG nova.network.neutron [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.012215] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529b2ef6-daf7-be66-40aa-6f11e6f21537, 'name': SearchDatastore_Task, 'duration_secs': 0.013797} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.013019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.013351] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0bde10dc-6762-49fb-9c0d-6b104a3cfa39/0bde10dc-6762-49fb-9c0d-6b104a3cfa39.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1009.014194] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-21529d77-596c-4ee4-801e-6696b6ff3c89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.024435] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1009.024435] env[68233]: value = "task-2782852" [ 1009.024435] env[68233]: _type = "Task" [ 1009.024435] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.039875] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782852, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.214217] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.252209] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.252209] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.252209] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.253563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.253988] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.002s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.256512] env[68233]: INFO nova.compute.manager [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Terminating instance [ 1009.316765] env[68233]: DEBUG nova.compute.manager [req-ad5d1c6d-a7a4-4be8-a8e3-5a94ce9d1c5b req-b7603888-831c-4bff-a2bc-54023fb601bd service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Received event network-vif-deleted-6efe483d-3f8f-4e12-9dd1-50d94f84ce17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1009.318709] env[68233]: INFO nova.compute.manager [req-ad5d1c6d-a7a4-4be8-a8e3-5a94ce9d1c5b req-b7603888-831c-4bff-a2bc-54023fb601bd service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Neutron deleted interface 6efe483d-3f8f-4e12-9dd1-50d94f84ce17; detaching it from the instance and deleting it from the info cache [ 1009.318709] env[68233]: DEBUG nova.network.neutron [req-ad5d1c6d-a7a4-4be8-a8e3-5a94ce9d1c5b req-b7603888-831c-4bff-a2bc-54023fb601bd service nova] 
[instance: d926386c-8543-4a6e-a782-588680cb5f34] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.357495] env[68233]: DEBUG oslo_vmware.api [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.357495] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1009.357495] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1009.358430] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1009.358430] env[68233]: INFO nova.compute.manager [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1009.358583] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.360602] env[68233]: DEBUG nova.compute.manager [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1009.360739] env[68233]: DEBUG nova.network.neutron [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1009.392670] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1009.394194] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a324c129-a709-49a4-b6e5-c6618cc58a86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.404350] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1009.404572] env[68233]: ERROR oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk due to incomplete transfer. [ 1009.404852] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c63a1c58-7ac6-4a2e-88c6-718e9d5c9ad2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.416181] env[68233]: DEBUG oslo_vmware.rw_handles [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52da8298-aea9-9175-f110-f84e3f0a15f3/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1009.416439] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Uploaded image 35b86049-24a9-40ef-b027-4a2c30fd3821 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1009.418935] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1009.419240] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3b050f15-646a-4d70-8919-285517b1ba1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.425401] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1009.425401] env[68233]: value = "task-2782853" [ 1009.425401] env[68233]: _type = "Task" [ 1009.425401] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.437892] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782853, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.468410] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1003eb59-507a-4e89-988f-f37ad5d6f8a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.477789] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a34b575-b5a3-4591-a1b4-b7ef39c5dd83 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.520992] env[68233]: DEBUG nova.network.neutron [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.524140] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a8d3a7-4661-4f3f-ba74-80c5a3a6d0c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.539456] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e08f4d-ad8c-4cdb-ad21-0cfa4fed6a6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.544319] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782852, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.555358] env[68233]: DEBUG nova.compute.provider_tree [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1009.709103] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.760554] env[68233]: DEBUG nova.compute.manager [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1009.760790] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.761833] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec294bd-6a67-49a3-b451-3e0716db3b76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.770221] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.770221] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92953542-81de-4f07-af0b-57d4b81768b3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.799317] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1009.799317] env[68233]: value = "task-2782854" [ 1009.799317] env[68233]: _type = "Task" [ 1009.799317] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.799317] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782854, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.822601] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53ffdc69-db44-40ee-9f9d-bdb1964f4987 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.833562] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2c23e4d-7b1c-4ac9-8ede-e3823eaecdbe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.882825] env[68233]: DEBUG nova.compute.manager [req-ad5d1c6d-a7a4-4be8-a8e3-5a94ce9d1c5b req-b7603888-831c-4bff-a2bc-54023fb601bd service nova] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Detach interface failed, port_id=6efe483d-3f8f-4e12-9dd1-50d94f84ce17, reason: Instance d926386c-8543-4a6e-a782-588680cb5f34 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1009.936857] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782853, 'name': Destroy_Task} progress is 33%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.998948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.999323] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.024087] env[68233]: INFO nova.compute.manager [-] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Took 1.35 seconds to deallocate network for instance. [ 1010.038127] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782852, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617762} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.038385] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0bde10dc-6762-49fb-9c0d-6b104a3cfa39/0bde10dc-6762-49fb-9c0d-6b104a3cfa39.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.038594] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1010.038937] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-941623b7-3e6e-47f9-a967-b400c4dc06b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.045989] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1010.045989] env[68233]: value = "task-2782855" [ 1010.045989] env[68233]: _type = "Task" [ 1010.045989] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.054108] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782855, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.058207] env[68233]: DEBUG nova.scheduler.client.report [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.108975] env[68233]: DEBUG nova.network.neutron [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.209454] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.291707] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782854, 'name': PowerOffVM_Task, 'duration_secs': 0.194405} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.293477] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.293673] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.294593] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc2a17bf-0952-4d03-aacf-45419479c713 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.504985] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1010.509832] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782853, 'name': Destroy_Task, 'duration_secs': 0.725635} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.510342] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Destroyed the VM [ 1010.510590] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1010.510846] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-48c1d03d-0380-4c9d-94ad-eccdd160e106 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.524093] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1010.524093] env[68233]: value = "task-2782857" [ 1010.524093] env[68233]: _type = "Task" [ 1010.524093] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.532623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.533067] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782857, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.556058] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782855, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073379} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.556205] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1010.556982] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5802c6-5d5a-458c-9998-0800856549f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.570995] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.454s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.571544] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1010.583634] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 0bde10dc-6762-49fb-9c0d-6b104a3cfa39/0bde10dc-6762-49fb-9c0d-6b104a3cfa39.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.584336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.899s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.584553] env[68233]: DEBUG nova.objects.instance [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'pci_requests' on Instance uuid dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.585897] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0319f7b7-5d9f-437f-806b-2afbe016a276 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.603702] env[68233]: DEBUG nova.objects.instance [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'numa_topology' on Instance uuid dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.616245] env[68233]: INFO 
nova.compute.manager [-] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Took 1.65 seconds to deallocate network for instance. [ 1010.616903] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1010.616903] env[68233]: value = "task-2782858" [ 1010.616903] env[68233]: _type = "Task" [ 1010.616903] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.630925] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782858, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.641183] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1010.644026] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1010.644026] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] 85313d15-04da-4f24-b203-bed5ebcbe1a9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1010.644026] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13f1f572-405b-4bf2-9546-9a9934b5a716 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.649384] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1010.649384] env[68233]: value = "task-2782859" [ 1010.649384] env[68233]: _type = "Task" [ 1010.649384] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.659359] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.717261] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.796257] env[68233]: DEBUG nova.network.neutron [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.031902] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782857, 'name': RemoveSnapshot_Task, 'duration_secs': 0.450483} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.036883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.037191] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1011.037834] env[68233]: DEBUG nova.compute.manager [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.038295] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c893d4e7-c94a-4e98-a87b-866ee35a9157 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.087025] env[68233]: DEBUG nova.compute.utils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.087977] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.088503] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.106352] env[68233]: INFO nova.compute.claims [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1011.129899] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782858, 'name': ReconfigVM_Task, 'duration_secs': 0.375922} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.129899] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 0bde10dc-6762-49fb-9c0d-6b104a3cfa39/0bde10dc-6762-49fb-9c0d-6b104a3cfa39.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.131219] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34633b1f-e3d0-4c0f-b2bb-9151a231338e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.135482] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.138737] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1011.138737] env[68233]: value = "task-2782860" [ 1011.138737] env[68233]: _type = "Task" [ 1011.138737] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.147029] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782860, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.158508] env[68233]: DEBUG oslo_vmware.api [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192587} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.158780] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.158916] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.159085] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.159253] env[68233]: INFO nova.compute.manager [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1011.159531] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1011.159649] env[68233]: DEBUG nova.compute.manager [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1011.159742] env[68233]: DEBUG nova.network.neutron [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1011.162841] env[68233]: DEBUG nova.policy [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da4cb00bd4c3405c88d8616b66b71e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14d2a0ead80a4efba8420023c31f8f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1011.211470] env[68233]: INFO nova.compute.manager [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Rebuilding instance [ 1011.218739] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782849, 'name': CloneVM_Task, 'duration_secs': 2.039061} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.221623] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Created linked-clone VM from snapshot [ 1011.227246] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2282a3-237b-431b-83e1-b868584d5c21 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.231104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.231366] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.241616] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Uploading image 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1011.270506] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1011.270506] env[68233]: value = "vm-559477" [ 1011.270506] env[68233]: _type = "VirtualMachine" [ 1011.270506] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1011.271075] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-56d586f2-9e7b-4236-8072-90ced303a934 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.280799] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease: (returnval){ [ 1011.280799] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a79ffc-3329-e6fa-606c-383b50b188a0" [ 1011.280799] env[68233]: _type = "HttpNfcLease" [ 1011.280799] env[68233]: } obtained for exporting VM: (result){ [ 1011.280799] env[68233]: value = "vm-559477" [ 1011.280799] env[68233]: _type = "VirtualMachine" [ 1011.280799] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1011.281079] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the lease: (returnval){ [ 1011.281079] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a79ffc-3329-e6fa-606c-383b50b188a0" [ 1011.281079] env[68233]: _type = "HttpNfcLease" [ 1011.281079] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1011.293573] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1011.293573] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a79ffc-3329-e6fa-606c-383b50b188a0" [ 1011.293573] env[68233]: _type = "HttpNfcLease" [ 1011.293573] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1011.294482] env[68233]: DEBUG nova.compute.manager [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1011.295244] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf8364d-b6a8-45af-a2e0-7aee76601ddd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.297809] env[68233]: INFO nova.compute.manager [-] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Took 1.94 seconds to deallocate network for instance. [ 1011.372517] env[68233]: DEBUG nova.compute.manager [req-428e8c09-e21e-46fc-a56b-4afd2dc487e6 req-e6387748-bc4a-4ca0-8add-0b1d8a3a2311 service nova] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Received event network-vif-deleted-4b269e98-c864-447a-8970-f8d1f503b4d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1011.372740] env[68233]: DEBUG nova.compute.manager [req-428e8c09-e21e-46fc-a56b-4afd2dc487e6 req-e6387748-bc4a-4ca0-8add-0b1d8a3a2311 service nova] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Received event network-vif-deleted-9041c031-c9af-4931-8450-0b57b0e71c17 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1011.557204] env[68233]: INFO nova.compute.manager [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Shelve offloading [ 1011.591525] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1011.651997] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782860, 'name': Rename_Task, 'duration_secs': 0.149678} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.652881] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1011.653167] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f72a836-de49-488a-b1b4-5958fb047e5c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.659685] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1011.659685] env[68233]: value = "task-2782862" [ 1011.659685] env[68233]: _type = "Task" [ 1011.659685] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.669398] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782862, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.693733] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Successfully created port: e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1011.736252] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1011.790687] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1011.790687] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a79ffc-3329-e6fa-606c-383b50b188a0" [ 1011.790687] env[68233]: _type = "HttpNfcLease" [ 1011.790687] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1011.790993] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1011.790993] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a79ffc-3329-e6fa-606c-383b50b188a0" [ 1011.790993] env[68233]: _type = "HttpNfcLease" [ 1011.790993] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1011.791724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fedacc3-93b8-4d86-a850-9173d368c6f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.799628] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1011.799815] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1011.864638] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.913405] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f1c0b793-6656-4d88-87d1-60d4eb39b6cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.061114] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.061428] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c2a55fa-4fb4-43c4-bb71-ab0a0eead531 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.068923] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1012.068923] env[68233]: value = "task-2782863" [ 1012.068923] env[68233]: _type = "Task" [ 1012.068923] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.079646] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1012.079869] env[68233]: DEBUG nova.compute.manager [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.080664] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff621b3-7761-4b36-a6ac-669f0b4d136b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.087232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.087410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.087583] env[68233]: DEBUG nova.network.neutron [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.146111] env[68233]: DEBUG nova.network.neutron [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.175727] env[68233]: DEBUG oslo_vmware.api [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782862, 'name': PowerOnVM_Task, 'duration_secs': 0.456115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.179020] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1012.179020] env[68233]: INFO nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Took 8.48 seconds to spawn the instance on the hypervisor. 
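The recurring pattern in the entries above ("Invoking VirtualMachine.PowerOnVM_Task ...", "Waiting for the task: (returnval){ value = task-... }", "progress is N%") is oslo.vmware's invoke-and-poll loop: the driver submits a vCenter task through the session and then blocks in wait_for_task while _poll_task emits the progress lines. The following is only an illustrative sketch of that pattern, not code from this deployment; the host, credentials, retry/poll values and the vm_ref argument are placeholders.

```python
# Sketch of the oslo.vmware invoke-and-wait pattern seen in this log.
# All connection details and the VM managed object reference are placeholders.
from oslo_vmware import api


def power_on_vm(host, user, password, vm_ref):
    """Power on a VM and block until vCenter reports the task as complete."""
    session = api.VMwareAPISession(
        host, user, password,
        api_retry_count=10,       # retries on transient API faults
        task_poll_interval=0.5)   # seconds between task polls ("progress is N%")
    try:
        # Corresponds to the "Invoking VirtualMachine.PowerOnVM_Task" entries.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # Polls task info until SUCCESS, raising if vCenter reports an error;
        # this is what produces the wait_for_task / _poll_task DEBUG lines.
        session.wait_for_task(task)
    finally:
        session.logout()
```

The same loop backs every task type in this section (CopyVirtualDisk_Task, DeleteDatastoreFile_Task, CloneVM_Task, ReconfigVM_Task, and so on); only the invoked method and its arguments change.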
[ 1012.179020] env[68233]: DEBUG nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1012.179020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727ba008-37fe-4ef3-bb2c-2248a2c8f4f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.260760] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.371243] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.374471] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a876243f-71ea-4c3a-9863-309f46081e22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.383615] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 1012.383615] env[68233]: value = "task-2782864" [ 1012.383615] env[68233]: _type = "Task" [ 1012.383615] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.396183] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782864, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.484107] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3001ccad-7678-4ab8-83d6-27d65efd125b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.493299] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8adc8c-8c13-4e8e-baa5-4608cda7e223 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.527477] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f704f9-a77b-4d24-99c0-b4cf5598a9fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.535892] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbcff5b-2899-44fb-8d12-e131ec597ef1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.554388] env[68233]: DEBUG nova.compute.provider_tree [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.605416] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1012.631395] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1012.631825] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1012.632096] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1012.632459] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1012.632725] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1012.633067] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1012.633546] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1012.633546] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1012.633724] env[68233]: DEBUG 
nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1012.633987] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1012.634253] env[68233]: DEBUG nova.virt.hardware [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1012.635640] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa90c8b-a958-4bc6-b05e-bb1d329a8758 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.643713] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2cd46d-25d7-41fe-839c-68989a10b6d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.648241] env[68233]: INFO nova.compute.manager [-] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Took 1.49 seconds to deallocate network for instance. [ 1012.699236] env[68233]: INFO nova.compute.manager [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Took 19.16 seconds to build instance. 
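The nova.virt.hardware records above walk through CPU-topology selection for the 1-vCPU m1.nano flavor: neither the flavor nor the image sets limits or preferences (all 0:0:0), so the per-dimension maxima default to 65536, and the only sockets*cores*threads factorisation of 1 vCPU is 1:1:1, hence "Got 1 possible topologies". A simplified sketch of that enumeration, illustrative only and not nova's actual _get_possible_cpu_topologies implementation:

import collections
import itertools

VirtCPUTopology = collections.namedtuple('VirtCPUTopology',
                                         ['sockets', 'cores', 'threads'])

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Keep every (sockets, cores, threads) combination whose product equals
    # the vCPU count and which stays within the per-dimension maxima.
    found = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# For vcpus=1 this returns exactly one entry,
# [VirtCPUTopology(sockets=1, cores=1, threads=1)],
# matching the "Possible topologies" and "Sorted desired topologies" records.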
[ 1012.797982] env[68233]: DEBUG nova.network.neutron [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updating instance_info_cache with network_info: [{"id": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "address": "fa:16:3e:ef:bf:2c", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap019c95d6-3c", "ovs_interfaceid": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.896584] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782864, 'name': PowerOffVM_Task, 'duration_secs': 0.236009} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.897095] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1012.897957] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1012.898371] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7bc342e9-61da-4b35-a976-1facca216b25 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.905597] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 1012.905597] env[68233]: value = "task-2782865" [ 1012.905597] env[68233]: _type = "Task" [ 1012.905597] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.913838] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782865, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.059989] env[68233]: DEBUG nova.scheduler.client.report [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.166039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.204711] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c2eb1b74-873d-43f4-814a-e0acfb29a0f8 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.673s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.300947] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1013.417600] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1013.417871] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1013.418206] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559441', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'name': 'volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '287df4d5-4e98-464d-8f0a-4571c1e4df4f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'serial': '6b0ac6e3-b417-4371-87f2-0141bc97e81c'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1013.419035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75783d6-9cd5-45e6-abb6-1378596278bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.439360] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2402b27-21cf-4373-b8ed-490ab8d26894 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.446890] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b47853e-c218-40fd-a9a7-7bafd2893eb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.470164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a155af19-4ac5-451c-b7a0-57087b01dd13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.488244] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] The volume has not been displaced from its original location: [datastore2] volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c/volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1013.493819] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Reconfiguring VM instance instance-00000056 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1013.495454] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0b331f9-cf19-449f-a6ae-15f1fd947df5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.509739] env[68233]: DEBUG nova.compute.manager [req-92996ba7-ccd1-49e5-bfc9-eadd58019764 req-793af68c-451b-4765-8e92-a8060ac86600 service nova] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Received event network-vif-deleted-4836d496-c0c3-42a6-8b3c-e86a6660174e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1013.515967] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 1013.515967] env[68233]: value = "task-2782866" [ 1013.515967] env[68233]: _type = "Task" [ 1013.515967] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.524974] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782866, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.565148] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.981s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.567605] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.753s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.569422] env[68233]: INFO nova.compute.claims [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1013.643607] env[68233]: INFO nova.network.neutron [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating port d9478083-21a3-4b61-ab65-e1281b8bac7b with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1013.723868] env[68233]: DEBUG nova.compute.manager [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Received event network-vif-plugged-e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1013.724086] env[68233]: DEBUG oslo_concurrency.lockutils [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.724174] env[68233]: DEBUG oslo_concurrency.lockutils [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] Lock "35587446-6f3b-465b-a2a6-0b154374734c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.724657] env[68233]: DEBUG oslo_concurrency.lockutils [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] Lock "35587446-6f3b-465b-a2a6-0b154374734c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.724657] env[68233]: DEBUG nova.compute.manager [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] No waiting events found dispatching network-vif-plugged-e2eef47a-821b-4644-9b1b-6ca932ebe044 
{{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1013.724657] env[68233]: WARNING nova.compute.manager [req-69abce05-920c-4aa0-8b70-cd10726f784d req-0f393da8-7d26-4b84-b4a5-6a135bbbe09a service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Received unexpected event network-vif-plugged-e2eef47a-821b-4644-9b1b-6ca932ebe044 for instance with vm_state building and task_state spawning. [ 1013.850244] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Successfully updated port: e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1014.026886] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782866, 'name': ReconfigVM_Task, 'duration_secs': 0.276242} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.027194] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Reconfigured VM instance instance-00000056 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1014.031952] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7165ad8-39e7-4974-80ad-1e48130c8f69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.048018] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 1014.048018] env[68233]: value = "task-2782867" [ 1014.048018] env[68233]: _type = "Task" [ 1014.048018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.056927] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782867, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.131558] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1014.132567] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4619b18f-c687-4888-836c-e71732bc43a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.145225] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.145225] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d592c46d-2b84-4437-ab18-3ec472aca977 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.208614] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.208936] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.209051] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] d4b69710-7f74-4755-8783-63e36c67f57a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.209357] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00e12222-4e27-4198-a34e-a50e5df5bccc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.215435] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1014.215435] env[68233]: value = "task-2782869" [ 1014.215435] env[68233]: _type = "Task" [ 1014.215435] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.223956] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782869, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.354231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.354452] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.354620] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1014.558752] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782867, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.728240] env[68233]: DEBUG oslo_vmware.api [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.386961} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.731495] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.731836] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.732153] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.758251] env[68233]: INFO nova.scheduler.client.report [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance d4b69710-7f74-4755-8783-63e36c67f57a [ 1014.888405] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.925031] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20932340-6bb9-47ce-ac15-de4d7cecafb9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.932718] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b27159-09e2-4fb6-9bab-81c8019c2901 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.965887] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd39228-242a-4b33-a88f-744e52c394f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.977825] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9574907-b5e7-4842-8913-f5c1aa2c76b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.996116] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.059103] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782867, 'name': ReconfigVM_Task, 'duration_secs': 0.818753} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.059442] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559441', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'name': 'volume-6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '287df4d5-4e98-464d-8f0a-4571c1e4df4f', 'attached_at': '', 'detached_at': '', 'volume_id': '6b0ac6e3-b417-4371-87f2-0141bc97e81c', 'serial': '6b0ac6e3-b417-4371-87f2-0141bc97e81c'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1015.059744] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.060549] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b7ec92-6015-4fa0-9662-fbf2b853ba37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.067838] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1015.068144] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-174f67a3-725a-4f5d-bafa-7bc772b8ce01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.073761] env[68233]: DEBUG nova.compute.manager [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1015.073990] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.074222] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] Lock 
"dca145c8-ed95-4dfb-9534-37035c75dafb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.074398] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.074545] env[68233]: DEBUG nova.compute.manager [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] No waiting events found dispatching network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.074735] env[68233]: WARNING nova.compute.manager [req-6e485109-6aa7-4b3e-93e4-f806ea4b22f0 req-cf1327b3-f5f8-47e9-b03e-3569ea7e3e7e service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received unexpected event network-vif-plugged-d9478083-21a3-4b61-ab65-e1281b8bac7b for instance with vm_state shelved_offloaded and task_state spawning. [ 1015.134059] env[68233]: DEBUG nova.network.neutron [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating instance_info_cache with network_info: [{"id": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "address": "fa:16:3e:43:49:55", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2eef47a-82", "ovs_interfaceid": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.141115] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.141337] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 
287df4d5-4e98-464d-8f0a-4571c1e4df4f] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.141515] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Deleting the datastore file [datastore2] 287df4d5-4e98-464d-8f0a-4571c1e4df4f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.141874] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d956562e-a765-4ac1-ab9e-7d7f9319aafe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.148063] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for the task: (returnval){ [ 1015.148063] env[68233]: value = "task-2782871" [ 1015.148063] env[68233]: _type = "Task" [ 1015.148063] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.157713] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.177056] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.177056] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.177191] env[68233]: DEBUG nova.network.neutron [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1015.265297] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.521034] env[68233]: ERROR nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [req-5a7933f9-36f6-42fb-abf9-41b0c27f7c2b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5a7933f9-36f6-42fb-abf9-41b0c27f7c2b"}]} [ 1015.533650] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Received event network-changed-e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1015.533740] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Refreshing instance network info cache due to event network-changed-e2eef47a-821b-4644-9b1b-6ca932ebe044. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1015.534038] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquiring lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.546099] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1015.563178] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1015.563435] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1015.576747] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1015.596013] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1015.636595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.636963] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance network_info: |[{"id": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "address": "fa:16:3e:43:49:55", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2eef47a-82", "ovs_interfaceid": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1015.637284] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquired lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.637461] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Refreshing network info 
cache for port e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.638690] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:49:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2eef47a-821b-4644-9b1b-6ca932ebe044', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.646263] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1015.651278] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1015.652296] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f9586b5-c1c0-4fa8-ab20-a404f523ad28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.678621] env[68233]: DEBUG oslo_vmware.api [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Task: {'id': task-2782871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140454} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.680120] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.680320] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.680500] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.684046] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1015.684046] env[68233]: value = "task-2782872" [ 1015.684046] env[68233]: _type = "Task" [ 1015.684046] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.705385] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782872, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.745359] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1015.745765] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cbfecd0-a8cd-431f-904d-20bddddf3d68 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.759145] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c188f0f-fb9f-4403-91cc-cd8484c752e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.804546] env[68233]: ERROR nova.compute.manager [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Failed to detach volume 6b0ac6e3-b417-4371-87f2-0141bc97e81c from /dev/sda: nova.exception.InstanceNotFound: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Traceback (most recent call last): [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self.driver.rebuild(**kwargs) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise NotImplementedError() [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] NotImplementedError [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] During handling of the above exception, another exception occurred: [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Traceback (most recent call last): [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self.driver.detach_volume(context, old_connection_info, [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] return self._volumeops.detach_volume(connection_info, instance) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._detach_volume_vmdk(connection_info, instance) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] stable_ref.fetch_moref(session) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] nova.exception.InstanceNotFound: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. [ 1015.804546] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.934471] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updated VIF entry in instance network info cache for port e2eef47a-821b-4644-9b1b-6ca932ebe044. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.934471] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating instance_info_cache with network_info: [{"id": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "address": "fa:16:3e:43:49:55", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2eef47a-82", "ovs_interfaceid": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.940343] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cbcaae-c730-4064-af7b-16b7b2d7c725 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.951291] env[68233]: DEBUG nova.network.neutron [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.953866] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d090be-42cf-4ad9-987c-482c8646a49f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.988822] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.990847] env[68233]: DEBUG nova.compute.utils [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Build of instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f aborted: Failed to rebuild volume backed instance. {{(pid=68233) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 1015.993122] env[68233]: ERROR nova.compute.manager [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f aborted: Failed to rebuild volume backed instance. [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Traceback (most recent call last): [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self.driver.rebuild(**kwargs) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise NotImplementedError() [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] NotImplementedError [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] During handling of the above exception, another exception occurred: [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Traceback (most recent call last): [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._detach_root_volume(context, instance, root_bdm) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 
287df4d5-4e98-464d-8f0a-4571c1e4df4f] with excutils.save_and_reraise_exception(): [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self.force_reraise() [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise self.value [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self.driver.detach_volume(context, old_connection_info, [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] return self._volumeops.detach_volume(connection_info, instance) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._detach_volume_vmdk(connection_info, instance) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] stable_ref.fetch_moref(session) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] nova.exception.InstanceNotFound: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. 
[ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] During handling of the above exception, another exception occurred: [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Traceback (most recent call last): [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 11390, in _error_out_instance_on_exception [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] yield [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1015.993122] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._do_rebuild_instance_with_claim( [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._do_rebuild_instance( [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._rebuild_default_impl(**kwargs) [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] self._rebuild_volume_backed_instance( [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] raise exception.BuildAbortException( [ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] nova.exception.BuildAbortException: Build of instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f aborted: Failed to rebuild volume backed instance. 
[ 1015.993983] env[68233]: ERROR nova.compute.manager [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] [ 1015.996663] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5428d29e-6e96-4506-89a4-8a72ff7c4576 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.005638] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b732d7f5-eb40-4fb4-bd70-e8bfb58677b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.021663] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.198276] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782872, 'name': CreateVM_Task, 'duration_secs': 0.39286} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.198582] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.199410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.199600] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.199964] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.200279] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbd3f5e1-41b4-4715-b2a2-9f70261059fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.205737] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1016.205737] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dfaad2-6a7e-d736-0f8c-504f5d7a771b" [ 1016.205737] env[68233]: _type = "Task" [ 1016.205737] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.214219] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dfaad2-6a7e-d736-0f8c-504f5d7a771b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.438572] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Releasing lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.438971] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received event network-vif-unplugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1016.439270] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.439551] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.439803] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.440051] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] No waiting events found dispatching network-vif-unplugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1016.440293] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received event network-vif-unplugged-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd for instance with task_state deleting. 
{{(pid=68233) _process_instance_event /opt/stack/nova/nova/compute/manager.py:11434}} [ 1016.440531] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Received event network-changed-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1016.440752] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Refreshing instance network info cache due to event network-changed-019c95d6-3ce8-430d-9fe1-c9d866f5f2cd. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1016.441014] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquiring lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.441224] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquired lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.441444] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Refreshing network info cache for port 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1016.459366] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.486453] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='8a8393c97ca7fc8be3a2d44c50e4a83a',container_format='bare',created_at=2025-03-06T03:56:07Z,direct_url=,disk_format='vmdk',id=d59a191b-5df7-4078-ba81-330dce0e225b,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-358345058-shelved',owner='978c6dbf1c10443da3253a58f1e5bdea',properties=ImageMetaProps,protected=,size=31663104,status='active',tags=,updated_at=2025-03-06T03:56:24Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1016.486713] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor limits 
0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1016.486870] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1016.487069] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1016.487221] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1016.487367] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1016.487578] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1016.487737] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1016.487907] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1016.488085] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1016.488268] env[68233]: DEBUG nova.virt.hardware [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1016.489127] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf48dc6-9c8b-4cbb-8da7-061fdee8fc57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.501382] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f3d29d1d-fef9-4ebf-ab50-73b9d6736b79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.515599] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:5f:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4b033f4d-2e92-4702-add6-410a29d3f251', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9478083-21a3-4b61-ab65-e1281b8bac7b', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1016.523130] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1016.523973] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1016.528198] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91cc8312-af5b-4e1e-9ed0-6f8454e10ae7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.544553] env[68233]: ERROR nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [req-56b849e0-3fc4-4a88-a2ec-d9ca9d24d1c4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-56b849e0-3fc4-4a88-a2ec-d9ca9d24d1c4"}]} [ 1016.548334] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1016.548334] env[68233]: value = "task-2782873" [ 1016.548334] env[68233]: _type = "Task" [ 1016.548334] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.557785] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782873, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.563473] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1016.579062] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1016.579062] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1016.590487] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1016.611481] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1016.717606] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52dfaad2-6a7e-d736-0f8c-504f5d7a771b, 'name': SearchDatastore_Task, 'duration_secs': 0.02894} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.717921] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1016.718188] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1016.718394] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.718613] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.718722] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1016.721338] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ac50b60-522b-461d-9d86-db48ceda1df5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.730642] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1016.730828] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1016.731570] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a912c8cb-83c0-495a-8fa8-c8b2057a562d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.739748] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1016.739748] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273c981-123a-c45a-8f09-403bd22da207" [ 1016.739748] env[68233]: _type = "Task" [ 1016.739748] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.747451] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273c981-123a-c45a-8f09-403bd22da207, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.880693] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6f4df8-b7c0-41cd-ad76-6c994741da58 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.889144] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8c6784-ce89-4ffa-ba69-16465c1f895c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.930605] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264c0f89-15c2-47c3-b37e-afabaad115e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.938334] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b4785a-89f8-4011-9f0d-a5d02f7bb00b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.953873] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.058808] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782873, 'name': CreateVM_Task, 'duration_secs': 0.474558} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.059010] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1017.059661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.059825] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.060236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1017.060502] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b176a17e-e9c5-46c2-b4d2-b5f951e7b235 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.065419] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1017.065419] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52373f49-a492-ef14-9607-1fe0d69d22c9" [ 1017.065419] env[68233]: _type = "Task" [ 1017.065419] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.075381] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52373f49-a492-ef14-9607-1fe0d69d22c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.101728] env[68233]: DEBUG nova.compute.manager [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1017.102009] env[68233]: DEBUG nova.compute.manager [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing instance network info cache due to event network-changed-d9478083-21a3-4b61-ab65-e1281b8bac7b. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1017.102235] env[68233]: DEBUG oslo_concurrency.lockutils [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.102378] env[68233]: DEBUG oslo_concurrency.lockutils [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.102619] env[68233]: DEBUG nova.network.neutron [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Refreshing network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.176695] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updated VIF entry in instance network info cache for port 019c95d6-3ce8-430d-9fe1-c9d866f5f2cd. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.177038] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updating instance_info_cache with network_info: [{"id": "019c95d6-3ce8-430d-9fe1-c9d866f5f2cd", "address": "fa:16:3e:ef:bf:2c", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": null, "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap019c95d6-3c", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.250930] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5273c981-123a-c45a-8f09-403bd22da207, 'name': SearchDatastore_Task, 'duration_secs': 0.013796} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.251815] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcb2897d-14bc-4e87-9c49-5889f97a6c04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.258046] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1017.258046] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5287eb12-24a2-4809-9edd-f7fc576b3a50" [ 1017.258046] env[68233]: _type = "Task" [ 1017.258046] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.266138] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5287eb12-24a2-4809-9edd-f7fc576b3a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.487156] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1017.487629] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 122 to 123 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1017.487937] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1017.577039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] 
Releasing lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.577648] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Processing image d59a191b-5df7-4078-ba81-330dce0e225b {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.577648] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.577836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.578127] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.578588] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21aa8c69-0f40-47a2-b98b-5cf9627a2f83 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.588694] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.588694] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.589164] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a55d1840-3822-4ebf-9470-c1715eaf02d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.594696] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1017.594696] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a5b261-21e4-313f-1db7-1574cad0734f" [ 1017.594696] env[68233]: _type = "Task" [ 1017.594696] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.603270] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a5b261-21e4-313f-1db7-1574cad0734f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.681478] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Releasing lock "refresh_cache-d4b69710-7f74-4755-8783-63e36c67f57a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.681731] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-changed-7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1017.681980] env[68233]: DEBUG nova.compute.manager [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing instance network info cache due to event network-changed-7d72ccc2-ee10-4121-9a73-41bc93e7493e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1017.682209] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.682377] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.682508] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing network info cache for port 7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1017.770549] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5287eb12-24a2-4809-9edd-f7fc576b3a50, 'name': SearchDatastore_Task, 'duration_secs': 0.014376} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.770860] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.771753] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1017.771753] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e0a63fa-06f1-43e2-b485-1e4d19181cbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.779029] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1017.779029] env[68233]: value = "task-2782874" [ 1017.779029] env[68233]: _type = "Task" [ 1017.779029] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.787963] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.865679] env[68233]: DEBUG nova.network.neutron [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updated VIF entry in instance network info cache for port d9478083-21a3-4b61-ab65-e1281b8bac7b. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1017.866060] env[68233]: DEBUG nova.network.neutron [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.993735] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.426s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.994432] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1017.997303] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.149s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.999359] env[68233]: INFO nova.compute.claims [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.015052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.106335] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1018.106602] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Fetch image to [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612/OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1018.106794] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Downloading stream optimized image d59a191b-5df7-4078-ba81-330dce0e225b to [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612/OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612.vmdk on the data store datastore2 as vApp {{(pid=68233) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1018.106966] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Downloading image file data d59a191b-5df7-4078-ba81-330dce0e225b to the ESX as VM named 'OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612' {{(pid=68233) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1018.193453] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1018.193453] env[68233]: value = "resgroup-9" [ 1018.193453] env[68233]: _type = "ResourcePool" [ 
1018.193453] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1018.193931] env[68233]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b14c0732-af23-4a82-8b0f-714ca8639c62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.219811] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lease: (returnval){ [ 1018.219811] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1018.219811] env[68233]: _type = "HttpNfcLease" [ 1018.219811] env[68233]: } obtained for vApp import into resource pool (val){ [ 1018.219811] env[68233]: value = "resgroup-9" [ 1018.219811] env[68233]: _type = "ResourcePool" [ 1018.219811] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1018.220228] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the lease: (returnval){ [ 1018.220228] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1018.220228] env[68233]: _type = "HttpNfcLease" [ 1018.220228] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1018.228386] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1018.228386] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1018.228386] env[68233]: _type = "HttpNfcLease" [ 1018.228386] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1018.292312] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782874, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.369288] env[68233]: DEBUG oslo_concurrency.lockutils [req-253958a4-e738-4c78-bcab-d81856b0d549 req-7d1761bd-fedb-466e-b2f5-8ae352eb96ed service nova] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.504233] env[68233]: DEBUG nova.compute.utils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1018.507738] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1018.507839] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1018.535368] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updated VIF entry in instance network info cache for port 7d72ccc2-ee10-4121-9a73-41bc93e7493e. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1018.535552] env[68233]: DEBUG nova.network.neutron [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.554605] env[68233]: DEBUG nova.policy [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77e05a39c81b454cabf776c1729c2169', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5db85d51fd424c0487aca461a2c641b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1018.728698] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1018.728698] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1018.728698] env[68233]: _type = "HttpNfcLease" [ 1018.728698] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1018.790148] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642089} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.790455] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1018.790740] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1018.790934] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-14f806c2-24b1-408d-a72f-67e881b75a2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.800130] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1018.800130] env[68233]: value = "task-2782876" [ 1018.800130] env[68233]: _type = "Task" [ 1018.800130] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.813349] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782876, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.843851] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Successfully created port: 50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1019.010766] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1019.038279] env[68233]: DEBUG oslo_concurrency.lockutils [req-6bef87bb-0d1b-4b78-a863-f5e344fd405c req-df7e6499-9a90-417d-8a24-cc5e39934265 service nova] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.116417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.116880] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.117245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.117593] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.117908] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.121135] env[68233]: INFO nova.compute.manager [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Terminating instance [ 1019.233082] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1019.233082] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1019.233082] env[68233]: _type = "HttpNfcLease" [ 1019.233082] env[68233]: } is ready. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1019.233506] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1019.233506] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f15ae3-5f6e-a1c7-2867-0d03b80ec82a" [ 1019.233506] env[68233]: _type = "HttpNfcLease" [ 1019.233506] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1019.234394] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a81d8a-c3e6-4c10-9f64-612d455b6d25 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.242845] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1019.243044] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating HTTP connection to write to file with size = 31663104 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1019.317368] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fde09e09-d7ae-40bb-830f-03f569806db3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.319184] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071388} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.319792] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.321040] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d403624-3310-422a-9101-06c76e678aa4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.347091] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.350192] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43e425f3-8219-422b-9dc6-b94d2af7b71d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.372140] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1019.372140] env[68233]: value = "task-2782877" [ 1019.372140] env[68233]: _type = "Task" [ 1019.372140] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.382728] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782877, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.424036] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126faa02-b169-493c-807a-dbfdd0051535 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.434466] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1019.435476] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dcc530-14d5-465b-81e3-e9039dcf1899 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.438962] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9bd2d1-8b48-4735-9c04-b72cae72dbcc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.444592] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1019.444755] env[68233]: ERROR oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk due to incomplete transfer. [ 1019.469699] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-38700694-5738-4ab1-9200-910717f43246 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.472255] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0589f5-158e-4472-bc30-c68ceeaf3523 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.479964] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6692a30d-78fc-413e-bc9b-d00860d4e714 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.484572] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52168087-0195-f7cd-4bed-f5272f31d9df/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1019.484789] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Uploaded image 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1019.487050] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1019.487471] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ec9cf979-b374-469e-895c-ec9dbbb4232e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.496497] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.498782] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1019.498782] env[68233]: value = "task-2782878" [ 1019.498782] env[68233]: _type = "Task" [ 1019.498782] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.506710] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782878, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.625695] env[68233]: DEBUG nova.compute.manager [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1019.627690] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48b7e23f-49bf-4b49-9089-f46952512ddc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.637677] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6028ee5-99dc-4a0c-b047-72cde540c11c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.676760] env[68233]: WARNING nova.virt.vmwareapi.driver [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. [ 1019.676974] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1019.680047] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2be42b87-78b2-40a0-860b-9ab2d34ebc7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.689543] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aead1fa-fd7b-4510-80eb-3cf201ee24b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.728662] env[68233]: WARNING nova.virt.vmwareapi.vmops [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. [ 1019.728891] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1019.729094] env[68233]: INFO nova.compute.manager [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1019.729341] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1019.732981] env[68233]: DEBUG nova.compute.manager [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1019.733171] env[68233]: DEBUG nova.network.neutron [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1019.882364] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782877, 'name': ReconfigVM_Task, 'duration_secs': 0.364722} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.882739] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1019.883403] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e753bb8c-b811-4cd0-8c56-963cf438620a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.889995] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1019.889995] env[68233]: value = "task-2782879" [ 1019.889995] env[68233]: _type = "Task" [ 1019.889995] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.898119] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782879, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.002420] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1020.015968] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782878, 'name': Destroy_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.021345] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1020.050988] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1020.051271] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1020.051430] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1020.051612] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 
tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1020.051771] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1020.051936] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1020.052168] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1020.052330] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1020.052500] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1020.052751] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1020.052966] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1020.053852] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338c66a6-b3c0-4156-94a8-3c6479a31f19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.063207] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae964ed-7e38-40f2-b881-5a03bec9b894 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.404163] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782879, 'name': Rename_Task, 'duration_secs': 0.17218} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.406834] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1020.407114] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2a22cc7-5831-49a8-8413-9e269244b34c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.416015] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1020.416015] env[68233]: value = "task-2782880" [ 1020.416015] env[68233]: _type = "Task" [ 1020.416015] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.429520] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782880, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.469868] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Completed reading data from the image iterator. {{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1020.470245] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1020.470987] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daaff3f4-da5c-4b6a-adce-87b0a9f9b4a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.477642] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1020.477830] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1020.478079] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f577d21c-9661-48c4-bfa8-53a9643ddb8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.510538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.513s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.511064] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1020.513826] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.598s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.515263] env[68233]: INFO nova.compute.claims [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.522842] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782878, 'name': Destroy_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.579067] env[68233]: DEBUG nova.compute.manager [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Received event network-vif-plugged-50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1020.579337] env[68233]: DEBUG oslo_concurrency.lockutils [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] Acquiring lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.579589] env[68233]: DEBUG oslo_concurrency.lockutils [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.579856] env[68233]: DEBUG oslo_concurrency.lockutils [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.580312] env[68233]: DEBUG nova.compute.manager [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] No waiting events found dispatching network-vif-plugged-50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1020.580493] env[68233]: WARNING nova.compute.manager [req-2148b60b-2bd0-4309-b945-247cd6f086f3 req-f59e9c85-e472-4e7d-9ed8-05b6d20d1964 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Received unexpected event network-vif-plugged-50656146-977c-4d5e-b10d-73efad3f7bef for instance with vm_state building and task_state spawning. [ 1020.716603] env[68233]: DEBUG oslo_vmware.rw_handles [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5281bf1f-b60f-23f6-1812-0b12b9842d0c/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1020.716821] env[68233]: INFO nova.virt.vmwareapi.images [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Downloaded image file data d59a191b-5df7-4078-ba81-330dce0e225b [ 1020.717840] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7040fdea-a7c3-4eb4-8f6c-0f71e7d766f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.737481] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d36c7014-129c-463c-8cfe-6b689c517807 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.763025] env[68233]: INFO nova.virt.vmwareapi.images [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] The imported VM was unregistered [ 1020.764956] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1020.765217] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Creating directory with path [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1020.765483] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a67f2288-d1e9-476b-852d-cd086f04c7ef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.768126] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Successfully updated port: 50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1020.778551] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Created directory with path [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1020.778776] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612/OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612.vmdk to [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk. 
{{(pid=68233) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1020.779452] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f9e15b65-4de4-4c16-b283-cc824275c1e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.786453] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1020.786453] env[68233]: value = "task-2782882" [ 1020.786453] env[68233]: _type = "Task" [ 1020.786453] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.795716] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.926861] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782880, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.016651] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782878, 'name': Destroy_Task, 'duration_secs': 1.371241} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.016977] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Destroyed the VM [ 1021.017239] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1021.018621] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9030f54a-042d-4624-bfe5-f95248144094 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.022493] env[68233]: DEBUG nova.compute.utils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1021.027305] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1021.027305] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1021.037276] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1021.037276] env[68233]: value = "task-2782883" [ 1021.037276] env[68233]: _type = "Task" [ 1021.037276] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.048516] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782883, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.094287] env[68233]: DEBUG nova.policy [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77e05a39c81b454cabf776c1729c2169', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5db85d51fd424c0487aca461a2c641b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1021.123299] env[68233]: DEBUG nova.compute.manager [req-33ed9293-d4b3-45bb-8d88-8d19275db4af req-777c9d68-838b-484a-b753-1c2098df9597 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received event network-vif-deleted-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1021.124069] env[68233]: INFO nova.compute.manager [req-33ed9293-d4b3-45bb-8d88-8d19275db4af req-777c9d68-838b-484a-b753-1c2098df9597 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Neutron deleted interface ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810; detaching it from the instance and deleting it from the info cache [ 1021.125314] env[68233]: DEBUG nova.network.neutron [req-33ed9293-d4b3-45bb-8d88-8d19275db4af req-777c9d68-838b-484a-b753-1c2098df9597 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.273239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1021.273239] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1021.273528] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1021.297748] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.424733] env[68233]: DEBUG oslo_vmware.api [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782880, 'name': PowerOnVM_Task, 'duration_secs': 0.552121} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.425499] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1021.425712] env[68233]: INFO nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Took 8.82 seconds to spawn the instance on the hypervisor. [ 1021.425957] env[68233]: DEBUG nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.426907] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfedc114-cbc2-40e7-862c-ee2313a6e6c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.527522] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1021.552918] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782883, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.592427] env[68233]: DEBUG nova.network.neutron [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.596437] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Successfully created port: 4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1021.630543] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc7b8f4d-c6c8-4da9-a6a5-0ff259e79221 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.641037] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61e4c1c-6315-4a1a-9c8b-9e3861c11cdd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.679204] env[68233]: DEBUG nova.compute.manager [req-33ed9293-d4b3-45bb-8d88-8d19275db4af req-777c9d68-838b-484a-b753-1c2098df9597 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Detach interface failed, port_id=ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810, reason: Instance 287df4d5-4e98-464d-8f0a-4571c1e4df4f could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1021.798984] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.822106] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1021.893095] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e56e846-d249-429d-808c-4f83e5a48c1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.901455] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c89f26-be17-4df4-8080-3352ed742fd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.936620] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9931d36b-f22b-4f61-b5c8-c9ac4e86db62 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.950485] env[68233]: INFO nova.compute.manager [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Took 18.96 seconds to build instance. [ 1021.955245] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca95d6f0-6a56-42d1-9713-8c0289d96f99 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.974764] env[68233]: DEBUG nova.compute.provider_tree [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.050028] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782883, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.059152] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Updating instance_info_cache with network_info: [{"id": "50656146-977c-4d5e-b10d-73efad3f7bef", "address": "fa:16:3e:87:dc:9f", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50656146-97", "ovs_interfaceid": "50656146-977c-4d5e-b10d-73efad3f7bef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.096900] env[68233]: INFO nova.compute.manager [-] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Took 2.36 seconds to deallocate network for instance. [ 1022.297430] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.460373] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4103296d-70ac-4ebd-8938-90c01b03021d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.475s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.478643] env[68233]: DEBUG nova.scheduler.client.report [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.538204] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1022.555791] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782883, 'name': RemoveSnapshot_Task, 'duration_secs': 1.268367} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.558134] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1022.558498] env[68233]: DEBUG nova.compute.manager [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.559720] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81abff6-4c20-4834-9d09-097ba3fdba6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.566073] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1022.566446] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Instance network_info: |[{"id": "50656146-977c-4d5e-b10d-73efad3f7bef", "address": "fa:16:3e:87:dc:9f", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50656146-97", "ovs_interfaceid": "50656146-977c-4d5e-b10d-73efad3f7bef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1022.569086] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:dc:9f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '50656146-977c-4d5e-b10d-73efad3f7bef', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1022.578874] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Creating folder: Project (5db85d51fd424c0487aca461a2c641b1). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1022.582644] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1022.582993] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1022.583188] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1022.583377] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1022.583524] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1022.583683] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1022.583965] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1022.584171] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1022.584350] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1022.584516] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1022.584690] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1022.585049] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f12d3908-9162-43e0-b41d-d2a468c56975 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.587947] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f797d82f-e3a2-4549-8991-f1ec95e0cc81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.605041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29d0c35-1c0e-400d-8d82-1031b4a48d36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.608264] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Created folder: Project (5db85d51fd424c0487aca461a2c641b1) in parent group-v559223. [ 1022.608478] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Creating folder: Instances. Parent ref: group-v559481. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1022.609119] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8b3fe52a-5fef-4b94-9308-54fc5b5c5485 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.630375] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Created folder: Instances in parent group-v559481. [ 1022.631186] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1022.631840] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1022.632908] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-127e1b05-e3b1-494e-9211-7a370bd05ef3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.655177] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1022.655177] env[68233]: value = "task-2782886" [ 1022.655177] env[68233]: _type = "Task" [ 1022.655177] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.664881] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782886, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.675967] env[68233]: DEBUG nova.compute.manager [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Received event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1022.676434] env[68233]: DEBUG nova.compute.manager [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing instance network info cache due to event network-changed-ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1022.676698] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Acquiring lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.676825] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Acquired lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1022.676989] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Refreshing network info cache for port ce58be4a-6fc6-4b9b-8427-cf8a9dbbd810 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1022.681978] env[68233]: INFO nova.compute.manager [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Took 0.58 seconds to detach 1 volumes for instance. [ 1022.682275] env[68233]: DEBUG nova.compute.manager [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Deleting volume: 6b0ac6e3-b417-4371-87f2-0141bc97e81c {{(pid=68233) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1022.802640] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.988833] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.991396] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.995056] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.480s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.995295] env[68233]: DEBUG nova.objects.instance [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1023.100980] env[68233]: INFO nova.compute.manager [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Shelve offloading [ 1023.166860] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782886, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.213293] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1023.244498] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.270017] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.299308] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782882, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.432921} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.299554] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612/OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612.vmdk to [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk. 
[ 1023.299757] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Cleaning up location [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1023.299918] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_17b3998e-216c-4e75-84a2-381ba24c2612 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.300191] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d70f774-b7f8-4bae-9e63-1b73f02f39be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.305982] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1023.305982] env[68233]: value = "task-2782888" [ 1023.305982] env[68233]: _type = "Task" [ 1023.305982] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.314007] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782888, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.354527] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Successfully updated port: 4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1023.497032] env[68233]: DEBUG nova.compute.utils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.497692] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1023.497879] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.554364] env[68233]: DEBUG nova.policy [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '77e05a39c81b454cabf776c1729c2169', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5db85d51fd424c0487aca461a2c641b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1023.606401] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.606775] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18b3145d-35f0-40af-9f3d-b63da7a2a103 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.616181] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1023.616181] env[68233]: value = "task-2782889" [ 1023.616181] env[68233]: _type = "Task" [ 1023.616181] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.625943] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1023.626197] env[68233]: DEBUG nova.compute.manager [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1023.626936] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80bb36d2-8949-4f1b-af96-67947d8968cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.632978] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.633173] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.633360] env[68233]: DEBUG nova.network.neutron [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.666259] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782886, 'name': CreateVM_Task, 'duration_secs': 0.631911} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.666438] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1023.669223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.669223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.669223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1023.669223] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01be0b67-db70-4edf-90c3-9dc478452a7e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.674571] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1023.674571] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528e9fab-80ea-dffb-0483-12226fc81c2d" [ 1023.674571] env[68233]: _type = "Task" [ 1023.674571] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.685945] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528e9fab-80ea-dffb-0483-12226fc81c2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.773551] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Releasing lock "refresh_cache-287df4d5-4e98-464d-8f0a-4571c1e4df4f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.774206] env[68233]: DEBUG nova.compute.manager [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Received event network-changed-50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1023.774206] env[68233]: DEBUG nova.compute.manager [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Refreshing instance network info cache due to event network-changed-50656146-977c-4d5e-b10d-73efad3f7bef. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1023.774879] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Acquiring lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.775058] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Acquired lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.775298] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Refreshing network info cache for port 50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1023.818385] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033673} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.818385] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.818385] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "[datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1023.818385] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk to [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1023.818385] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c8b6edd-6ba6-4c71-9cc0-a46e37fc14c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.825240] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1023.825240] env[68233]: value = "task-2782890" [ 1023.825240] env[68233]: _type = "Task" [ 1023.825240] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.841024] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.859248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1023.859248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1023.859248] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1023.905343] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Successfully created port: f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1024.001221] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1024.004777] env[68233]: DEBUG oslo_concurrency.lockutils [None req-58385726-3748-4d1b-a6fe-3ca9959711d7 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.005893] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.473s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.006125] env[68233]: DEBUG nova.objects.instance [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lazy-loading 'resources' on Instance uuid d926386c-8543-4a6e-a782-588680cb5f34 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1024.192269] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528e9fab-80ea-dffb-0483-12226fc81c2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010628} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.192510] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.192765] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.195971] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.195971] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.195971] env[68233]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.195971] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdc2c613-9441-4e57-a753-4e98e63ee0bd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.206743] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.206984] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.208042] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dbc8d1b-4ea7-48fa-8610-b95d41d0b0df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.215944] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1024.215944] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cdebcd-4332-a59d-e216-046a5e6db8f2" [ 1024.215944] env[68233]: _type = "Task" [ 1024.215944] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.225497] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cdebcd-4332-a59d-e216-046a5e6db8f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.336514] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.458495] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1024.713070] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Received event network-changed-e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1024.713070] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Refreshing instance network info cache due to event network-changed-e2eef47a-821b-4644-9b1b-6ca932ebe044. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1024.713070] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Acquiring lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.713070] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Acquired lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.713070] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Refreshing network info cache for port e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1024.730407] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cdebcd-4332-a59d-e216-046a5e6db8f2, 'name': SearchDatastore_Task, 'duration_secs': 0.082376} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.734735] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-830821cd-13c2-45a7-a4b5-0aa65abe053a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.742828] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1024.742828] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52856aa0-a8f0-ecaf-efdd-330f2c4b080f" [ 1024.742828] env[68233]: _type = "Task" [ 1024.742828] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.754422] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52856aa0-a8f0-ecaf-efdd-330f2c4b080f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.816902] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cceec15-1d59-4933-8182-9faf508b0def {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.827582] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fddbbec-6d2b-4907-a2d9-b037d5fc44cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.838728] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.867575] env[68233]: DEBUG nova.network.neutron [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.869756] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9537683-2e7e-4995-94ad-2eef50967225 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.881245] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57640b94-5105-482f-908c-f82422c8d0e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.899062] env[68233]: DEBUG nova.compute.provider_tree [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.014431] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1025.018172] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Updated VIF entry in instance network info cache for port 50656146-977c-4d5e-b10d-73efad3f7bef. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.018537] env[68233]: DEBUG nova.network.neutron [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Updating instance_info_cache with network_info: [{"id": "50656146-977c-4d5e-b10d-73efad3f7bef", "address": "fa:16:3e:87:dc:9f", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50656146-97", "ovs_interfaceid": "50656146-977c-4d5e-b10d-73efad3f7bef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.020416] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Updating instance_info_cache with network_info: [{"id": "4953ebd5-dce1-491d-a724-c337c5569470", "address": "fa:16:3e:54:61:c1", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": 
"nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4953ebd5-dc", "ovs_interfaceid": "4953ebd5-dce1-491d-a724-c337c5569470", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.048478] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1025.048783] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1025.048970] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1025.049559] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1025.049724] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1025.049887] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1025.050116] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1025.050280] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1025.050451] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1025.050615] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1025.050790] env[68233]: DEBUG nova.virt.hardware [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1025.052057] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4c9321-6725-4dee-8e89-73c10ed3b15c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.061262] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027f7b7d-ff94-424c-b887-3fd9692cb90a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.253684] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52856aa0-a8f0-ecaf-efdd-330f2c4b080f, 'name': SearchDatastore_Task, 'duration_secs': 0.088508} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.253967] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.254263] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4922985d-ad04-4c34-8dcb-6e6f8df94ff9/4922985d-ad04-4c34-8dcb-6e6f8df94ff9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.254524] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c35e36de-a980-4ab2-b14e-0617435dbe57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.262587] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1025.262587] env[68233]: value = "task-2782891" [ 1025.262587] env[68233]: _type = "Task" [ 1025.262587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.271724] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782891, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.341430] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task} progress is 63%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.374295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.402409] env[68233]: DEBUG nova.scheduler.client.report [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1025.419017] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Successfully updated port: f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.527045] env[68233]: DEBUG oslo_concurrency.lockutils [req-b660a33c-d570-467c-9155-29b0ac13acdd req-45218ca0-8049-4ee8-9315-4d20590fb1a6 service nova] Releasing lock "refresh_cache-4922985d-ad04-4c34-8dcb-6e6f8df94ff9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.527628] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.527973] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Instance network_info: |[{"id": "4953ebd5-dce1-491d-a724-c337c5569470", "address": "fa:16:3e:54:61:c1", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap4953ebd5-dc", "ovs_interfaceid": "4953ebd5-dce1-491d-a724-c337c5569470", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1025.528392] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:61:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4953ebd5-dce1-491d-a724-c337c5569470', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1025.536065] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1025.536317] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1025.536552] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-418bb0d1-aa79-44e2-b8c0-66f3c687f307 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.558814] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1025.558814] env[68233]: value = "task-2782892" [ 1025.558814] env[68233]: _type = "Task" [ 1025.558814] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.566794] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782892, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.732354] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updated VIF entry in instance network info cache for port e2eef47a-821b-4644-9b1b-6ca932ebe044. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1025.732866] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating instance_info_cache with network_info: [{"id": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "address": "fa:16:3e:43:49:55", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.140", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2eef47a-82", "ovs_interfaceid": "e2eef47a-821b-4644-9b1b-6ca932ebe044", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.773405] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782891, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.825656] env[68233]: DEBUG nova.compute.manager [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-vif-unplugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1025.826543] env[68233]: DEBUG oslo_concurrency.lockutils [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.826790] env[68233]: DEBUG oslo_concurrency.lockutils [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.826968] env[68233]: DEBUG oslo_concurrency.lockutils [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.827161] env[68233]: DEBUG nova.compute.manager [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] No waiting events found dispatching network-vif-unplugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1025.827382] env[68233]: WARNING nova.compute.manager [req-df355af0-25ce-40be-9068-a7216730d1a9 req-78997aad-f8db-48c2-ab30-c4fbfc6cefb9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received unexpected event network-vif-unplugged-be512a20-e94b-4c51-8658-24c6e1feba94 for instance with vm_state shelved and task_state shelving_offloading. [ 1025.840036] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.909313] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.911264] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.874s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.913159] env[68233]: INFO nova.compute.claims [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1025.918099] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1025.918099] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf2815a-9cd6-4ae1-af7f-5b7510a21a49 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.922204] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.922204] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.922318] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.929847] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1025.929847] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-2cdeb196-b8cd-49df-8acf-1c3000f81859 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.946133] env[68233]: INFO nova.scheduler.client.report [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleted allocations for instance d926386c-8543-4a6e-a782-588680cb5f34 [ 1026.018365] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.018556] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.018586] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.018920] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf3bfab9-fdfe-4f86-bb9b-69ecf52d6472 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.027896] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1026.027896] env[68233]: value = "task-2782894" [ 1026.027896] env[68233]: _type = "Task" [ 1026.027896] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.039537] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.071250] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782892, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.237134] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Releasing lock "refresh_cache-35587446-6f3b-465b-a2a6-0b154374734c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.237134] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Received event network-vif-plugged-4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.237134] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Acquiring lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.237134] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.237268] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.237699] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] No waiting events found dispatching network-vif-plugged-4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1026.237699] env[68233]: WARNING nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Received unexpected event network-vif-plugged-4953ebd5-dce1-491d-a724-c337c5569470 for instance with vm_state building and task_state spawning. [ 1026.237897] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Received event network-changed-4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.238106] env[68233]: DEBUG nova.compute.manager [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Refreshing instance network info cache due to event network-changed-4953ebd5-dce1-491d-a724-c337c5569470. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1026.238337] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Acquiring lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.238895] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Acquired lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.238895] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Refreshing network info cache for port 4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.275661] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782891, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.342215] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782890, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.40317} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.342215] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/d59a191b-5df7-4078-ba81-330dce0e225b/d59a191b-5df7-4078-ba81-330dce0e225b.vmdk to [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.343389] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503ad334-0e17-4c29-ab82-88fff124e0af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.372499] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.373087] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8ac41c2-a2ad-4124-afd5-fdd03ecd8195 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.398616] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1026.398616] env[68233]: value = "task-2782895" [ 1026.398616] env[68233]: _type = "Task" [ 1026.398616] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.408882] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782895, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.459326] env[68233]: DEBUG oslo_concurrency.lockutils [None req-840be313-24af-4d28-97c7-0147a279f35b tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "d926386c-8543-4a6e-a782-588680cb5f34" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.424s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.475262] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.542790] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782894, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.576177] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782892, 'name': CreateVM_Task, 'duration_secs': 0.899903} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.576428] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1026.577310] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.577527] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.577939] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1026.578328] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-512bc290-ac27-43d7-a317-11071695b781 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.587154] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1026.587154] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524fc59e-0bf6-d787-d1fa-106b443cbedf" [ 1026.587154] env[68233]: _type = "Task" [ 1026.587154] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.595378] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524fc59e-0bf6-d787-d1fa-106b443cbedf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.646458] env[68233]: DEBUG nova.network.neutron [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Updating instance_info_cache with network_info: [{"id": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "address": "fa:16:3e:7d:ef:66", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9953dba-7f", "ovs_interfaceid": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.737546] env[68233]: DEBUG nova.compute.manager [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Received event network-vif-plugged-f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.737760] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Acquiring lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.738463] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.738647] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.738820] env[68233]: DEBUG nova.compute.manager [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] No waiting events found dispatching 
network-vif-plugged-f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1026.738994] env[68233]: WARNING nova.compute.manager [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Received unexpected event network-vif-plugged-f9953dba-7fc8-49a3-b5d0-41d95d904ce4 for instance with vm_state building and task_state spawning. [ 1026.742603] env[68233]: DEBUG nova.compute.manager [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Received event network-changed-f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1026.742603] env[68233]: DEBUG nova.compute.manager [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Refreshing instance network info cache due to event network-changed-f9953dba-7fc8-49a3-b5d0-41d95d904ce4. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1026.742603] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Acquiring lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.774432] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782891, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.342642} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.774728] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 4922985d-ad04-4c34-8dcb-6e6f8df94ff9/4922985d-ad04-4c34-8dcb-6e6f8df94ff9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.774984] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.775275] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f42ea2c3-1a95-486f-9512-44529cfa161d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.782062] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1026.782062] env[68233]: value = "task-2782896" [ 1026.782062] env[68233]: _type = "Task" [ 1026.782062] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.790533] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.909540] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782895, 'name': ReconfigVM_Task, 'duration_secs': 0.435311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.909877] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb/dca145c8-ed95-4dfb-9534-37035c75dafb.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.910498] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af30f253-57d8-4506-81db-975bc2829063 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.917259] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1026.917259] env[68233]: value = "task-2782897" [ 1026.917259] env[68233]: _type = "Task" [ 1026.917259] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.925331] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782897, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.039068] env[68233]: DEBUG oslo_vmware.api [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2782894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.579694} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.041872] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.042100] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.042832] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.072088] env[68233]: INFO nova.scheduler.client.report [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted allocations for instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 [ 1027.100065] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524fc59e-0bf6-d787-d1fa-106b443cbedf, 'name': SearchDatastore_Task, 'duration_secs': 0.01151} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.104081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.104365] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.104611] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.104764] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.106247] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.107635] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1aed3383-70a6-4f00-a18e-90f5169d9700 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.129415] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.129613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.133243] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27f409e6-f259-4419-bbc4-69de5e66cea7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.138722] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.138722] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ce0b33-0f0a-342d-30f7-58c7bd7d6aed" [ 1027.138722] env[68233]: _type = "Task" [ 1027.138722] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.154840] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.155211] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Instance network_info: |[{"id": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "address": "fa:16:3e:7d:ef:66", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9953dba-7f", "ovs_interfaceid": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1027.155874] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ce0b33-0f0a-342d-30f7-58c7bd7d6aed, 'name': SearchDatastore_Task, 'duration_secs': 0.009168} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.156167] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Acquired lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.156349] env[68233]: DEBUG nova.network.neutron [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Refreshing network info cache for port f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.157646] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:ef:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6685c85e-be1e-4b7b-a6cc-3e50e59b6567', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9953dba-7fc8-49a3-b5d0-41d95d904ce4', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1027.168286] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1027.180313] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1027.180398] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66aca13-f304-446a-8983-46a859e05c5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.185034] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1cc9e4a-0696-486e-b6f7-b52a694e2207 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.205184] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.205184] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b99158-1ac2-8776-f02b-72f8ca3dfb0b" [ 1027.205184] env[68233]: _type = "Task" [ 1027.205184] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.206556] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1027.206556] env[68233]: value = "task-2782899" [ 1027.206556] env[68233]: _type = "Task" [ 1027.206556] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.221690] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b99158-1ac2-8776-f02b-72f8ca3dfb0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.226873] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782899, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.293665] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068086} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.293965] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.294886] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6798a76-8705-4cd6-b240-6bd99e5cfd5a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.320106] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Reconfiguring VM instance instance-00000060 to attach disk [datastore2] 4922985d-ad04-4c34-8dcb-6e6f8df94ff9/4922985d-ad04-4c34-8dcb-6e6f8df94ff9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.321262] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Updated VIF entry in instance network info cache for port 4953ebd5-dce1-491d-a724-c337c5569470. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.321642] env[68233]: DEBUG nova.network.neutron [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Updating instance_info_cache with network_info: [{"id": "4953ebd5-dce1-491d-a724-c337c5569470", "address": "fa:16:3e:54:61:c1", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4953ebd5-dc", "ovs_interfaceid": "4953ebd5-dce1-491d-a724-c337c5569470", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.325325] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5a52f5f-5b23-4bb4-8376-ada27449305f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.348971] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.348971] env[68233]: value = "task-2782900" [ 1027.348971] env[68233]: _type = "Task" [ 1027.348971] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.357204] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1419d5-d287-48a1-8821-fb61e25b2e66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.363888] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782900, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.368592] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323a1440-46e5-4d5b-9931-e6655eda9b11 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.404647] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8faf930d-7e24-4f94-a9f0-1b4beccf020c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.413275] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea48069-be4e-46f7-9dfd-0898c0a60a73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.429355] env[68233]: DEBUG nova.compute.provider_tree [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.438311] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782897, 'name': Rename_Task, 'duration_secs': 0.195076} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.438311] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1027.438311] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2c0a886-701e-4b72-a5be-4b2912b33c5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.444486] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1027.444486] env[68233]: value = "task-2782901" [ 1027.444486] env[68233]: _type = "Task" [ 1027.444486] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.451994] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782901, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.583634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.615775] env[68233]: DEBUG nova.network.neutron [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Updated VIF entry in instance network info cache for port f9953dba-7fc8-49a3-b5d0-41d95d904ce4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.616173] env[68233]: DEBUG nova.network.neutron [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Updating instance_info_cache with network_info: [{"id": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "address": "fa:16:3e:7d:ef:66", "network": {"id": "c650c5ea-f40d-4329-a6b7-842143f578da", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-378034928-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5db85d51fd424c0487aca461a2c641b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6685c85e-be1e-4b7b-a6cc-3e50e59b6567", "external-id": "nsx-vlan-transportzone-129", "segmentation_id": 129, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9953dba-7f", "ovs_interfaceid": "f9953dba-7fc8-49a3-b5d0-41d95d904ce4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.727898] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b99158-1ac2-8776-f02b-72f8ca3dfb0b, 'name': SearchDatastore_Task, 'duration_secs': 0.026523} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.732830] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.733405] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5d99e0cb-9742-4a6c-84d0-f8d916ef9104/5d99e0cb-9742-4a6c-84d0-f8d916ef9104.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1027.733725] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782899, 'name': CreateVM_Task, 'duration_secs': 0.428152} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.733977] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3f5c2b1-ca39-422e-b7e4-f5ebd4cd4851 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.736719] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.737679] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.737966] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.738665] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1027.739396] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbbb08be-6b2d-41dd-8c6c-befd88274d9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.744048] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 
tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.744048] env[68233]: value = "task-2782902" [ 1027.744048] env[68233]: _type = "Task" [ 1027.744048] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.745911] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.745911] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ed3298-38f0-87a4-e35c-643f47c0e0e2" [ 1027.745911] env[68233]: _type = "Task" [ 1027.745911] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.766664] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ed3298-38f0-87a4-e35c-643f47c0e0e2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.766989] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782902, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.843467] env[68233]: DEBUG oslo_concurrency.lockutils [req-47d148f8-a3bc-430c-bed0-3f9f5850a0a6 req-06408fe1-45aa-4dfe-a1d3-067744a18e57 service nova] Releasing lock "refresh_cache-5d99e0cb-9742-4a6c-84d0-f8d916ef9104" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.859845] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782900, 'name': ReconfigVM_Task, 'duration_secs': 0.372246} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.860196] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Reconfigured VM instance instance-00000060 to attach disk [datastore2] 4922985d-ad04-4c34-8dcb-6e6f8df94ff9/4922985d-ad04-4c34-8dcb-6e6f8df94ff9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.861010] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8463f53b-f2b3-4273-8492-198090aa0f4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.864658] env[68233]: DEBUG nova.compute.manager [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1027.864850] env[68233]: DEBUG nova.compute.manager [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing instance network info cache due to event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1027.865078] env[68233]: DEBUG oslo_concurrency.lockutils [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.865224] env[68233]: DEBUG oslo_concurrency.lockutils [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.865381] env[68233]: DEBUG nova.network.neutron [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1027.871339] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1027.871339] env[68233]: value = "task-2782903" [ 1027.871339] env[68233]: _type = "Task" [ 1027.871339] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.887073] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782903, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.934926] env[68233]: DEBUG nova.scheduler.client.report [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1027.956066] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782901, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.119458] env[68233]: DEBUG oslo_concurrency.lockutils [req-4ad54648-3e14-4dad-8d24-5325d37f0eca req-d8db9563-ec09-461a-8525-913a7f7e7988 service nova] Releasing lock "refresh_cache-619230c4-f642-4835-8c5a-84ece6610e0f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.266699] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782902, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.273892] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ed3298-38f0-87a4-e35c-643f47c0e0e2, 'name': SearchDatastore_Task, 'duration_secs': 0.017547} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.274733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.275351] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1028.277021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.277021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.277021] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1028.277021] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1acb154f-dfc4-4330-b33a-07481a12c759 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.294024] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1028.294024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1028.300049] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f685563-2c8d-46d0-8581-f1836203d56e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.307430] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1028.307430] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e88e45-46e6-8a7a-6714-8519d810d9ee" [ 1028.307430] env[68233]: _type = "Task" [ 1028.307430] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.318994] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e88e45-46e6-8a7a-6714-8519d810d9ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.382265] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782903, 'name': Rename_Task, 'duration_secs': 0.156476} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.382761] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.383080] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6855c93c-cbf8-489f-aa9e-a1d90980aa3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.390970] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1028.390970] env[68233]: value = "task-2782904" [ 1028.390970] env[68233]: _type = "Task" [ 1028.390970] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.407261] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782904, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.440398] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.529s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.441223] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1028.445775] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.311s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.445775] env[68233]: DEBUG nova.objects.instance [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lazy-loading 'resources' on Instance uuid a6b913f8-8ce5-4227-b36c-bc191d2e7907 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1028.456445] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782901, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.766892] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782902, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.744755} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.771447] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5d99e0cb-9742-4a6c-84d0-f8d916ef9104/5d99e0cb-9742-4a6c-84d0-f8d916ef9104.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1028.771699] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1028.772455] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e78dc559-4702-4fae-88c6-bfb82286cb3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.780703] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1028.780703] env[68233]: value = "task-2782905" [ 1028.780703] env[68233]: _type = "Task" [ 1028.780703] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.790118] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782905, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.819809] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e88e45-46e6-8a7a-6714-8519d810d9ee, 'name': SearchDatastore_Task, 'duration_secs': 0.051905} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.820781] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19625f8b-ef7e-445b-801f-45bba0ef72b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.825583] env[68233]: DEBUG nova.network.neutron [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updated VIF entry in instance network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.826334] env[68233]: DEBUG nova.network.neutron [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapbe512a20-e9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.831647] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1028.831647] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526cfa8b-0f68-aa34-a58f-aa0d1dd7d7cf" [ 1028.831647] env[68233]: _type = "Task" [ 1028.831647] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.841765] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526cfa8b-0f68-aa34-a58f-aa0d1dd7d7cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.908028] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782904, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.952562] env[68233]: DEBUG nova.compute.utils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1028.969885] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1028.969971] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1028.972062] env[68233]: DEBUG oslo_vmware.api [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782901, 'name': PowerOnVM_Task, 'duration_secs': 1.078369} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.973021] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.012031] env[68233]: DEBUG nova.policy [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65225f2affe34ceda9a265989bddfc9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74a353ea173c4b8bb74b84032d4e12b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1029.082046] env[68233]: DEBUG nova.compute.manager [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.082329] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1207258-426f-45c8-9765-6ae56e9c9a77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.187446] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1029.275680] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299776ec-46e8-4516-9ccd-9172172cdd4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.287569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d0bd5a-a90f-41cb-86f6-dbeeee7caaa0 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.295413] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782905, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133271} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.320449] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.321752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826f10d8-6502-4ded-b89a-e500215fae78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.326463] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf6d9b-8faf-4434-8671-7afacf355f7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.334926] env[68233]: DEBUG oslo_concurrency.lockutils [req-a66a37c2-7686-4290-be49-7f243fffba33 req-be40168f-23d1-4eb2-ac61-fbf7413594b9 service nova] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.348851] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcd7231-a134-4b59-b3fd-1224bfb22f1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.362033] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 5d99e0cb-9742-4a6c-84d0-f8d916ef9104/5d99e0cb-9742-4a6c-84d0-f8d916ef9104.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.363249] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-218ca522-1c11-42ea-979b-1ba39dbd7d91 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.383302] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526cfa8b-0f68-aa34-a58f-aa0d1dd7d7cf, 'name': SearchDatastore_Task, 'duration_secs': 0.016671} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.391986] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.395297] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 619230c4-f642-4835-8c5a-84ece6610e0f/619230c4-f642-4835-8c5a-84ece6610e0f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1029.395297] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1029.395297] env[68233]: value = "task-2782906" [ 1029.395297] env[68233]: _type = "Task" [ 1029.395297] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.395297] env[68233]: DEBUG nova.compute.provider_tree [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.395297] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc351952-ee54-42e7-818b-de5ea396ddbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.408421] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782904, 'name': PowerOnVM_Task, 'duration_secs': 0.567818} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.412449] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.412668] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Took 9.39 seconds to spawn the instance on the hypervisor. 
[ 1029.412842] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.413202] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1029.413202] env[68233]: value = "task-2782907" [ 1029.413202] env[68233]: _type = "Task" [ 1029.413202] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.413705] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.414440] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a737aeb-9342-4dac-abf5-9e6f895444fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.432075] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782907, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.470587] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1029.493638] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Successfully created port: dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1029.605152] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9def5083-9e8b-48e0-889d-e7c6471c12f5 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 24.955s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.902475] env[68233]: DEBUG nova.scheduler.client.report [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.912134] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.929390] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782907, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.937768] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Took 24.14 seconds to build instance. 
[ 1030.285156] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "19cf6f80-ff11-4881-896e-9fc162ded31e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.285626] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.408281] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782906, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.414196] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.970s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.416476] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.552s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.416706] env[68233]: DEBUG nova.objects.instance [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'resources' on Instance uuid 13972b73-8bae-4a2a-a987-b6177381e7c8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.426918] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.922302} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.427775] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 619230c4-f642-4835-8c5a-84ece6610e0f/619230c4-f642-4835-8c5a-84ece6610e0f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.428141] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.428329] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8c5e436d-4f97-4b10-bd0a-6cda9f977075 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.435147] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1030.435147] env[68233]: value = "task-2782908" [ 1030.435147] env[68233]: _type = "Task" [ 1030.435147] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.439328] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.654s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.444505] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782908, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.445738] env[68233]: INFO nova.scheduler.client.report [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Deleted allocations for instance a6b913f8-8ce5-4227-b36c-bc191d2e7907 [ 1030.483227] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1030.523212] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1030.523368] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1030.523664] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1030.525131] 
env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1030.525131] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1030.525430] env[68233]: DEBUG nova.virt.hardware [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1030.527146] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac7a2b8-293d-4540-be7c-4b73b0d76b98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.535771] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d003be-c04a-44ec-a6c9-b0109726b3d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.787750] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1030.841435] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.841786] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.842070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "827711ac-ef52-41a0-9029-0a1805522a08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.842356] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.842541] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.845209] env[68233]: INFO nova.compute.manager [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Terminating instance [ 1030.918020] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782906, 'name': ReconfigVM_Task, 'duration_secs': 1.288259} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.918020] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 5d99e0cb-9742-4a6c-84d0-f8d916ef9104/5d99e0cb-9742-4a6c-84d0-f8d916ef9104.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.918020] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4dd587ab-0b66-4390-9483-9bf332056440 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.927018] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1030.927018] env[68233]: value = "task-2782909" [ 1030.927018] env[68233]: _type = "Task" [ 1030.927018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.941123] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782909, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.955239] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782908, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171979} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.955541] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.956383] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fb2d96-7c6e-496e-ab73-251eef7650e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.963324] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f7d63504-f748-47c0-a69c-4fb227e314ab tempest-ServerAddressesNegativeTestJSON-1229709589 tempest-ServerAddressesNegativeTestJSON-1229709589-project-member] Lock "a6b913f8-8ce5-4227-b36c-bc191d2e7907" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.629s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.989113] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 619230c4-f642-4835-8c5a-84ece6610e0f/619230c4-f642-4835-8c5a-84ece6610e0f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.992657] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a0dbfc5-1a24-43c1-9541-085f2fee7c40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.017378] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1031.017378] env[68233]: value = "task-2782910" [ 1031.017378] env[68233]: _type = "Task" [ 1031.017378] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.028690] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782910, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.261283] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7aaf9bc-674d-4c8f-b796-246c74d4faec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.270215] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cd7f10-88b7-47cf-8270-37ac9a3f4206 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.311526] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c052467d-efd4-4405-aec3-66694a95b784 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.325908] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679fbc3a-a8ff-4b8b-addc-6ee58138b43a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.345444] env[68233]: DEBUG nova.compute.provider_tree [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1031.347563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.349662] env[68233]: DEBUG nova.compute.manager [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1031.349856] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1031.350661] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e402f31-1942-4ce6-8fc1-26c63844c357 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.359111] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.359759] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e84e72b4-cec7-4a3b-95f8-d4c04c35fed5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.366064] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1031.366064] env[68233]: value = "task-2782911" [ 1031.366064] env[68233]: _type = "Task" [ 1031.366064] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.380358] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782911, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.400801] env[68233]: DEBUG nova.compute.manager [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Received event network-vif-plugged-dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1031.401062] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] Acquiring lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.401391] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.401600] env[68233]: DEBUG oslo_concurrency.lockutils [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1031.401787] env[68233]: DEBUG nova.compute.manager [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] No waiting events found dispatching network-vif-plugged-dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1031.402059] env[68233]: WARNING nova.compute.manager [req-2d058008-bccb-4d0c-84b6-ec37ed63b461 req-cef806a7-142a-43cc-b8b2-7b82ea09d5c7 service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Received unexpected event network-vif-plugged-dafc44e7-03ce-48e2-b3a5-9f255fda5098 for instance with vm_state building and task_state spawning. [ 1031.439368] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782909, 'name': Rename_Task, 'duration_secs': 0.162051} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.439368] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1031.439368] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b997bb32-ff3f-4db3-8acc-cc6d7e37e1a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.448018] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1031.448018] env[68233]: value = "task-2782912" [ 1031.448018] env[68233]: _type = "Task" [ 1031.448018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.454905] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.530033] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782910, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.663015] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Successfully updated port: dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1031.832530] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985aa360-5480-42d1-b1f2-72efa7bc06bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.839397] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Suspending the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1031.839689] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-9373ed77-412e-4e55-b811-bfb0555384e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.846851] env[68233]: DEBUG oslo_vmware.api [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1031.846851] env[68233]: value = "task-2782913" [ 1031.846851] env[68233]: _type = "Task" [ 1031.846851] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.858785] env[68233]: DEBUG oslo_vmware.api [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782913, 'name': SuspendVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.878998] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782911, 'name': PowerOffVM_Task, 'duration_secs': 0.276485} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.878998] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1031.878998] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1031.878998] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-65e994e7-1435-4b7e-abb0-85cc50d895a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.880392] env[68233]: ERROR nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [req-cdaee9e0-a1a2-4aa9-b2b5-ac513ebda036] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-cdaee9e0-a1a2-4aa9-b2b5-ac513ebda036"}]} [ 1031.898682] env[68233]: DEBUG nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1031.921203] env[68233]: DEBUG nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1031.921478] env[68233]: DEBUG nova.compute.provider_tree [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1031.935197] env[68233]: DEBUG nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1031.955860] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1031.956121] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1031.956562] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 
tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleting the datastore file [datastore2] 827711ac-ef52-41a0-9029-0a1805522a08 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1031.959602] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b61b2c2e-6567-43ce-85ee-76cbf52dc5ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.961684] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782912, 'name': PowerOnVM_Task, 'duration_secs': 0.497423} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.962792] env[68233]: DEBUG nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1031.965364] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.965621] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Took 9.43 seconds to spawn the instance on the hypervisor. [ 1031.965862] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.967396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c6d794-3a4e-4bc3-ac98-f901226333e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.972189] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for the task: (returnval){ [ 1031.972189] env[68233]: value = "task-2782915" [ 1031.972189] env[68233]: _type = "Task" [ 1031.972189] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.988265] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.038948] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782910, 'name': ReconfigVM_Task, 'duration_secs': 0.632058} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.039435] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 619230c4-f642-4835-8c5a-84ece6610e0f/619230c4-f642-4835-8c5a-84ece6610e0f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1032.040371] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55a1424a-e331-4203-87bb-79d34073a760 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.051494] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1032.051494] env[68233]: value = "task-2782916" [ 1032.051494] env[68233]: _type = "Task" [ 1032.051494] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.064458] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782916, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.169592] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.169798] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.169957] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1032.258435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a416f257-bad2-4624-926a-f45d475df783 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.267930] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-defa3abc-45f9-4e8f-b66c-6722a4029867 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.303048] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045209c5-ff2e-47d4-bdee-684b52216e98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.311397] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d5b084-b86b-45ff-ab95-ec504ea756de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.326650] env[68233]: DEBUG nova.compute.provider_tree [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1032.356580] env[68233]: DEBUG oslo_vmware.api [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782913, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.482907] env[68233]: DEBUG oslo_vmware.api [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Task: {'id': task-2782915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360334} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.483188] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1032.483393] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1032.483591] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1032.483784] env[68233]: INFO nova.compute.manager [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1032.484078] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.488649] env[68233]: DEBUG nova.compute.manager [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1032.488649] env[68233]: DEBUG nova.network.neutron [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1032.492544] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Took 26.66 seconds to build instance. [ 1032.561835] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782916, 'name': Rename_Task, 'duration_secs': 0.243855} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.562183] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.562533] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65ecf872-6fb3-4543-bad4-e4d6013f7ff4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.569377] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1032.569377] env[68233]: value = "task-2782917" [ 1032.569377] env[68233]: _type = "Task" [ 1032.569377] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.581504] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782917, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.858997] env[68233]: DEBUG oslo_vmware.api [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782913, 'name': SuspendVM_Task, 'duration_secs': 0.628328} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.858997] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Suspended the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1032.859225] env[68233]: DEBUG nova.compute.manager [None req-1d3c6672-9138-42cf-8d99-69d4a1c256c9 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.860238] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ef96f-8516-4309-8dcd-8f9b59197b77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.869970] env[68233]: DEBUG nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1032.869970] env[68233]: DEBUG nova.compute.provider_tree [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 125 to 126 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1032.870987] env[68233]: DEBUG nova.compute.provider_tree [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1032.958730] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1032.997709] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 28.175s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.081198] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782917, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.292306] env[68233]: DEBUG nova.network.neutron [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Updating instance_info_cache with network_info: [{"id": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "address": "fa:16:3e:04:d5:a6", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdafc44e7-03", "ovs_interfaceid": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.379543] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.963s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.383422] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.123s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1033.384907] env[68233]: INFO nova.compute.claims [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 
tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1033.450897] env[68233]: INFO nova.scheduler.client.report [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted allocations for instance 13972b73-8bae-4a2a-a987-b6177381e7c8 [ 1033.452027] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.454565] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.465323] env[68233]: DEBUG nova.compute.manager [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Received event network-changed-dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1033.465422] env[68233]: DEBUG nova.compute.manager [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Refreshing instance network info cache due to event network-changed-dafc44e7-03ce-48e2-b3a5-9f255fda5098. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1033.465536] env[68233]: DEBUG oslo_concurrency.lockutils [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] Acquiring lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.580837] env[68233]: DEBUG oslo_vmware.api [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782917, 'name': PowerOnVM_Task, 'duration_secs': 0.937} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.581474] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.581474] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Took 8.57 seconds to spawn the instance on the hypervisor. 
[ 1033.581587] env[68233]: DEBUG nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.582398] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e8b6bc-ac45-4663-9067-b530558cb576 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.798053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.798053] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance network_info: |[{"id": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "address": "fa:16:3e:04:d5:a6", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdafc44e7-03", "ovs_interfaceid": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1033.798053] env[68233]: DEBUG oslo_concurrency.lockutils [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] Acquired lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.798053] env[68233]: DEBUG nova.network.neutron [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Refreshing network info cache for port dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1033.798053] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:04:d5:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dafc44e7-03ce-48e2-b3a5-9f255fda5098', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1033.811472] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1033.814161] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1033.814161] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-025fed27-b3ec-4942-95aa-85808cbb74e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.836679] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1033.836679] env[68233]: value = "task-2782918" [ 1033.836679] env[68233]: _type = "Task" [ 1033.836679] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.844991] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782918, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.965954] env[68233]: DEBUG oslo_concurrency.lockutils [None req-198cf6e4-36ab-48f2-929f-7af4540b0546 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "13972b73-8bae-4a2a-a987-b6177381e7c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 26.236s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.972614] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.974792] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.975260] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.975436] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.975585] env[68233]: DEBUG oslo_service.periodic_task [None 
req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.975731] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.975867] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1033.976639] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.015830] env[68233]: DEBUG nova.network.neutron [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.102231] env[68233]: INFO nova.compute.manager [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Took 28.21 seconds to build instance. [ 1034.348764] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782918, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.475511] env[68233]: INFO nova.compute.manager [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Resuming [ 1034.476137] env[68233]: DEBUG nova.objects.instance [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lazy-loading 'flavor' on Instance uuid dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1034.480582] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.518042] env[68233]: INFO nova.compute.manager [-] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Took 2.03 seconds to deallocate network for instance. 
[ 1034.605434] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f1fffd9e-ba78-41f4-adaf-5effee004906 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 29.734s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.686231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353eba9d-f02d-4403-a4f8-c559e5cc89c7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.689645] env[68233]: DEBUG nova.network.neutron [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Updated VIF entry in instance network info cache for port dafc44e7-03ce-48e2-b3a5-9f255fda5098. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1034.690096] env[68233]: DEBUG nova.network.neutron [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Updating instance_info_cache with network_info: [{"id": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "address": "fa:16:3e:04:d5:a6", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdafc44e7-03", "ovs_interfaceid": "dafc44e7-03ce-48e2-b3a5-9f255fda5098", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.696636] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec200da-dadd-4f66-9098-b6a24ddc4802 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.736432] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5ffa66-1c3a-430c-bb4b-109b4e68f666 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.744729] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16af8c9-45a3-49a9-ac79-9944b58ede6d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.760287] env[68233]: DEBUG nova.compute.provider_tree 
[None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.833137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4677d047-f8dc-4501-be9b-14e6a2222f46" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.833137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.833137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.833137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.833137] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.836862] env[68233]: INFO nova.compute.manager [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Terminating instance [ 1034.848321] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782918, 'name': CreateVM_Task, 'duration_secs': 0.517223} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.849117] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1034.849830] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.850094] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.850477] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1034.851209] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99fc9fe6-19f4-4aba-b16b-212157a0d530 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.856312] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1034.856312] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522615ac-dceb-f285-780b-c29adab38157" [ 1034.856312] env[68233]: _type = "Task" [ 1034.856312] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.864958] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522615ac-dceb-f285-780b-c29adab38157, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.025049] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.194611] env[68233]: DEBUG oslo_concurrency.lockutils [req-7185fb37-bcdc-4297-8781-58aa74f07a92 req-5a80769e-55d9-426d-a41a-b9ad61383bbf service nova] Releasing lock "refresh_cache-e95e2309-1df5-466b-bb8a-0c9188dc07c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.264534] env[68233]: DEBUG nova.scheduler.client.report [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 174, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1035.344556] env[68233]: DEBUG nova.compute.manager [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1035.344743] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.345817] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a512d08e-cc9d-4aa6-b111-0937cb08a32d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.353577] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.353577] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-829cbfe5-44e5-4dfb-82a8-c462812cff61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.360796] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1035.360796] env[68233]: value = "task-2782920" [ 1035.360796] env[68233]: _type = "Task" [ 1035.360796] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.367736] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522615ac-dceb-f285-780b-c29adab38157, 'name': SearchDatastore_Task, 'duration_secs': 0.009834} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.368484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.368808] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.369104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.369295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.369562] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.369855] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eac36b2-c079-43c9-9db8-949d9c6ba9f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.374741] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.382062] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.382446] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.383420] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de21c122-eac1-46a3-8b16-d1db9473e284 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.388980] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1035.388980] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521c665e-8bdd-b278-2dae-9d32e3c26a62" [ 1035.388980] env[68233]: _type = "Task" [ 1035.388980] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.397503] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521c665e-8bdd-b278-2dae-9d32e3c26a62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.511498] env[68233]: DEBUG nova.compute.manager [req-3f7b049f-a398-4cb3-b053-53652726b74c req-0fe70c66-78d3-48fd-9dc4-64804bf2b580 service nova] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Received event network-vif-deleted-b710ae65-1e11-4b1c-8389-3094fbf99637 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1035.527732] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.528017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.528279] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.528470] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.528643] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.532695] env[68233]: INFO nova.compute.manager [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Terminating instance [ 1035.774383] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.774921] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1035.781107] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.615s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.781261] env[68233]: DEBUG nova.objects.instance [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'resources' on Instance uuid 85313d15-04da-4f24-b203-bed5ebcbe1a9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.873146] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782920, 'name': PowerOffVM_Task, 'duration_secs': 0.230342} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.873810] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.873810] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1035.873914] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a70a22ae-92b8-4a5f-bd01-72dfbefead90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.900020] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521c665e-8bdd-b278-2dae-9d32e3c26a62, 'name': SearchDatastore_Task, 'duration_secs': 0.017082} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.904024] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cbef728-5e3b-40ce-814c-2a07fc5e3c9d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.906387] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1035.906387] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525cac55-172e-b04b-b28b-2367f3ce4a5b" [ 1035.906387] env[68233]: _type = "Task" [ 1035.906387] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.914109] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525cac55-172e-b04b-b28b-2367f3ce4a5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.946234] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1035.946477] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1035.946668] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleting the datastore file [datastore2] 4677d047-f8dc-4501-be9b-14e6a2222f46 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1035.946998] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d19063fb-3229-429b-a977-87c2124d6745 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.954032] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1035.954032] env[68233]: value = "task-2782922" [ 1035.954032] env[68233]: _type = "Task" [ 1035.954032] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.964417] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782922, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.992847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.993200] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquired lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.993491] env[68233]: DEBUG nova.network.neutron [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1036.043089] env[68233]: DEBUG nova.compute.manager [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.043089] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.043089] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44b5e17-a422-49f3-bff7-494a3324ccb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.052018] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.052018] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e09af6a4-d6be-41aa-acc9-d465737b50cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.057822] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1036.057822] env[68233]: value = "task-2782923" [ 1036.057822] env[68233]: _type = "Task" [ 1036.057822] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.067435] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.286013] env[68233]: DEBUG nova.compute.utils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1036.286570] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1036.286737] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1036.375559] env[68233]: DEBUG nova.policy [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'af2bbbfcc44f4432987807f8b20c6776', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3636e6c8e70e4996ac83a672732a1ff6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1036.420194] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525cac55-172e-b04b-b28b-2367f3ce4a5b, 'name': SearchDatastore_Task, 'duration_secs': 0.018297} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.420361] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.420783] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.423896] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da3767d3-042b-4829-999c-90b6d2451c75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.430770] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1036.430770] env[68233]: value = "task-2782924" [ 1036.430770] env[68233]: _type = "Task" [ 1036.430770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.439363] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.466324] env[68233]: DEBUG oslo_vmware.api [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2782922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141972} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.466772] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.467070] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.467353] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.467624] env[68233]: INFO nova.compute.manager [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1036.467976] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1036.468426] env[68233]: DEBUG nova.compute.manager [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1036.468595] env[68233]: DEBUG nova.network.neutron [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1036.573651] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782923, 'name': PowerOffVM_Task, 'duration_secs': 0.268079} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.574763] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.575492] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.581894] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e55d7a5-bbb0-49f2-bc5a-304fc2761f38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.621194] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984b33c7-140f-49cb-942d-1f557a2ac251 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.629150] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3decda6-d962-4f02-84f4-68c9fd4995a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.670018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-628e2aa2-4d63-48e5-b62c-79ae48fc237c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.676416] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.676666] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.676850] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleting the datastore file [datastore2] 4922985d-ad04-4c34-8dcb-6e6f8df94ff9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.679048] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d64836f4-487e-4ca7-9b18-0a96009a8569 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.682275] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b3d6f0-b798-4f2c-9a68-fa2750991f8b {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.696419] env[68233]: DEBUG nova.compute.provider_tree [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1036.701147] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1036.701147] env[68233]: value = "task-2782926" [ 1036.701147] env[68233]: _type = "Task" [ 1036.701147] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.707111] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.792759] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1036.942731] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782924, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.986605] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Successfully created port: ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1037.102326] env[68233]: DEBUG nova.network.neutron [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [{"id": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "address": "fa:16:3e:cf:5f:ff", "network": {"id": "575b02a9-230a-4aaf-98a2-c1889f1b59af", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1070436091-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "978c6dbf1c10443da3253a58f1e5bdea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4b033f4d-2e92-4702-add6-410a29d3f251", "external-id": "nsx-vlan-transportzone-649", "segmentation_id": 649, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9478083-21", "ovs_interfaceid": "d9478083-21a3-4b61-ab65-e1281b8bac7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.222823] env[68233]: DEBUG oslo_vmware.api [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.511313} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.223182] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.223182] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.224246] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.224246] env[68233]: INFO nova.compute.manager [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1037.224246] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.224246] env[68233]: DEBUG nova.compute.manager [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.224246] env[68233]: DEBUG nova.network.neutron [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.235897] env[68233]: DEBUG nova.scheduler.client.report [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 126 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1037.236285] env[68233]: DEBUG nova.compute.provider_tree [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 126 to 127 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1037.236495] env[68233]: DEBUG nova.compute.provider_tree [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1037.443476] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758779} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.443800] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.444019] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.444284] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36e60c0c-ce4f-44e6-bac6-9760112953d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.452068] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1037.452068] env[68233]: value = "task-2782927" [ 1037.452068] env[68233]: _type = "Task" [ 1037.452068] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.461764] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782927, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.610423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Releasing lock "refresh_cache-dca145c8-ed95-4dfb-9534-37035c75dafb" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.610423] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d59890-6372-4b35-8b31-b88e0a3ff49e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.622222] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Resuming the VM {{(pid=68233) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1037.622645] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0c54f8f-ebf7-4549-a915-85a81e1114e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.631518] env[68233]: DEBUG oslo_vmware.api [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1037.631518] env[68233]: value = "task-2782928" [ 1037.631518] env[68233]: _type = "Task" [ 1037.631518] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.641879] env[68233]: DEBUG oslo_vmware.api [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.709019] env[68233]: DEBUG nova.compute.manager [req-377427eb-9ec2-442c-a3f9-c278351c2db4 req-711839e1-3339-4d89-b980-1c0fb8b5e640 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Received event network-vif-deleted-ef237162-2628-4a17-9afd-7a418911f222 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1037.709019] env[68233]: INFO nova.compute.manager [req-377427eb-9ec2-442c-a3f9-c278351c2db4 req-711839e1-3339-4d89-b980-1c0fb8b5e640 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Neutron deleted interface ef237162-2628-4a17-9afd-7a418911f222; detaching it from the instance and deleting it from the info cache [ 1037.709019] env[68233]: DEBUG nova.network.neutron [req-377427eb-9ec2-442c-a3f9-c278351c2db4 req-711839e1-3339-4d89-b980-1c0fb8b5e640 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.728681] env[68233]: DEBUG nova.compute.manager [req-22bec674-2afc-4852-9808-61bf60072f82 req-6d01b355-0820-46d4-a1af-3f99c7a68f45 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Received event network-vif-deleted-50656146-977c-4d5e-b10d-73efad3f7bef {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1037.728932] env[68233]: INFO nova.compute.manager [req-22bec674-2afc-4852-9808-61bf60072f82 req-6d01b355-0820-46d4-a1af-3f99c7a68f45 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Neutron deleted interface 50656146-977c-4d5e-b10d-73efad3f7bef; detaching it from the instance and deleting it from the info cache [ 1037.729129] env[68233]: DEBUG nova.network.neutron [req-22bec674-2afc-4852-9808-61bf60072f82 req-6d01b355-0820-46d4-a1af-3f99c7a68f45 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.741471] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.744533] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.479s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.744775] env[68233]: DEBUG nova.objects.instance [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'resources' on Instance uuid d4b69710-7f74-4755-8783-63e36c67f57a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.769979] env[68233]: INFO nova.scheduler.client.report [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 
tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocations for instance 85313d15-04da-4f24-b203-bed5ebcbe1a9 [ 1037.802199] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1037.832693] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1037.833013] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1037.833844] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1037.834080] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1037.834385] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1037.834443] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1037.834660] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1037.834823] env[68233]: DEBUG nova.virt.hardware [None 
req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1037.834998] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1037.835182] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1037.835356] env[68233]: DEBUG nova.virt.hardware [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1037.837228] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a72f14-50c5-4142-96c0-f629ebd26eb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.844878] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea004c11-21c7-4e44-bab4-5fb3ca71f2e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.963065] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.278888} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.963065] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1037.964302] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656770d8-6b03-4572-8d95-5754edd1e483 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.986738] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.987193] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8e8a6bfb-3c27-41ab-89fe-803026c7d4de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.009403] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1038.009403] env[68233]: value = "task-2782929" [ 1038.009403] env[68233]: _type = "Task" [ 1038.009403] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.018402] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782929, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.095012] env[68233]: DEBUG nova.network.neutron [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.127366] env[68233]: DEBUG nova.network.neutron [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.145911] env[68233]: DEBUG oslo_vmware.api [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782928, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.211034] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1458bb7d-4ac0-42a2-9f9f-18ded852672b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.222243] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee30dd8-6f19-4419-9aa7-d581bec331d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.235266] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-04d75eb2-c3bd-4401-b0af-5f6ce8aa386d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.244251] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1375cfd7-3f8b-4a1a-a6c9-42a8fa5f6fab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.265736] env[68233]: DEBUG nova.objects.instance [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'numa_topology' on Instance uuid d4b69710-7f74-4755-8783-63e36c67f57a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1038.267361] env[68233]: DEBUG nova.compute.manager [req-377427eb-9ec2-442c-a3f9-c278351c2db4 req-711839e1-3339-4d89-b980-1c0fb8b5e640 service nova] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Detach interface failed, port_id=ef237162-2628-4a17-9afd-7a418911f222, reason: Instance 4677d047-f8dc-4501-be9b-14e6a2222f46 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1038.278737] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa74dc13-033d-4256-b3e8-0cdb5cf193c1 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "85313d15-04da-4f24-b203-bed5ebcbe1a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.027s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.293561] env[68233]: DEBUG nova.compute.manager [req-22bec674-2afc-4852-9808-61bf60072f82 req-6d01b355-0820-46d4-a1af-3f99c7a68f45 service nova] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Detach interface failed, port_id=50656146-977c-4d5e-b10d-73efad3f7bef, reason: Instance 4922985d-ad04-4c34-8dcb-6e6f8df94ff9 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1038.519535] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782929, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.598492] env[68233]: INFO nova.compute.manager [-] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Took 2.13 seconds to deallocate network for instance. 
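The records above follow one recurring pattern: a vCenter operation (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CopyVirtualDisk_Task) returns a task handle, and the API layer then polls it, logging "progress is N%" until the task "completed successfully" with a duration_secs. The Python sketch below only illustrates that polling loop under stated assumptions; it is not the oslo_vmware.api implementation, and fetch_task_info is a hypothetical callable standing in for the RetrievePropertiesEx round trip seen in the log.

import time

class TaskTimeout(Exception):
    """Raised when a polled task does not finish in time."""

def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
    # fetch_task_info(task_id) is assumed to return a dict such as
    # {'state': 'running', 'progress': 40} or
    # {'state': 'success', 'duration_secs': 0.14}.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)  # one property-collector round trip
        state = info.get('state')
        if state == 'success':
            # corresponds to "... completed successfully" in the records above
            return info
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # corresponds to "Task: {...} progress is N%." in the records above
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TaskTimeout(f"task {task_id} did not complete within {timeout}s")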
[ 1038.631831] env[68233]: INFO nova.compute.manager [-] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Took 1.41 seconds to deallocate network for instance. [ 1038.645884] env[68233]: DEBUG oslo_vmware.api [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782928, 'name': PowerOnVM_Task, 'duration_secs': 0.603037} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.645884] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Resumed the VM {{(pid=68233) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1038.646181] env[68233]: DEBUG nova.compute.manager [None req-56ce3e42-3342-4b14-9fda-2242833bab4e tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.646858] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb39a80-66db-4ce7-aa6d-c9ad519707b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.770189] env[68233]: DEBUG nova.objects.base [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1038.949957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.950316] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.950534] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.950717] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.950886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.955596] env[68233]: INFO nova.compute.manager [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Terminating instance [ 1038.992236] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64622fd2-450f-48b5-8aa8-692e0dbf169e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.000351] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f26191a-e20e-4669-a610-c4b8f158a84c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.051474] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19879848-2bad-4272-87bc-a8332ca6028b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.066237] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1fd93c-2fb2-4efd-a0b6-cdddf0171777 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.070493] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782929, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.086541] env[68233]: DEBUG nova.compute.provider_tree [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.104832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.139271] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.281509] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Successfully updated port: ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1039.461958] env[68233]: DEBUG nova.compute.manager [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1039.461958] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1039.462618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b29846-ccdd-4aca-b30d-10786be6db09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.470721] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1039.470976] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-555783c5-4662-485b-825a-18ca577b0787 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.477548] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1039.477548] env[68233]: value = "task-2782930" [ 1039.477548] env[68233]: _type = "Task" [ 1039.477548] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.485302] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.557132] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782929, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.589626] env[68233]: DEBUG nova.scheduler.client.report [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1039.736994] env[68233]: DEBUG nova.compute.manager [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Received event network-vif-plugged-ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1039.737250] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Acquiring lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.737250] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.737402] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.737566] env[68233]: DEBUG nova.compute.manager [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] No waiting events found dispatching network-vif-plugged-ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.737730] env[68233]: WARNING nova.compute.manager [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Received unexpected event network-vif-plugged-ab615646-115b-4015-a2c3-db87dc950fcf for instance with vm_state building and task_state spawning. 
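The inventory payload logged above for provider 51aa13e7-0977-4031-b209-4ae90c83752c can be read as usable capacity per resource class by combining total, reserved and allocation_ratio. The snippet below is only a worked example of that arithmetic using the exact figures from the log; usable_capacity is a hypothetical helper, not a Nova or Placement API.

# Inventory dict copied from the record above; 'min_unit', 'max_unit' and
# 'step_size' are omitted because they do not affect total capacity.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    # (total - reserved) * allocation_ratio is how Placement sizes what can
    # actually be allocated from a resource provider.
    return {
        rc: int((spec['total'] - spec['reserved']) * spec['allocation_ratio'])
        for rc, spec in inv.items()
    }

print(usable_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}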
[ 1039.737886] env[68233]: DEBUG nova.compute.manager [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Received event network-changed-ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1039.738166] env[68233]: DEBUG nova.compute.manager [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Refreshing instance network info cache due to event network-changed-ab615646-115b-4015-a2c3-db87dc950fcf. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1039.738394] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Acquiring lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.738536] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Acquired lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.738694] env[68233]: DEBUG nova.network.neutron [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Refreshing network info cache for port ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.786710] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.988261] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782930, 'name': PowerOffVM_Task, 'duration_secs': 0.408547} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.988601] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.988818] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1039.989087] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14173570-1fd7-48dc-b0b1-339d5663fad0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.058364] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782929, 'name': ReconfigVM_Task, 'duration_secs': 1.667814} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.058584] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Reconfigured VM instance instance-00000063 to attach disk [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.059252] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80ca62f2-cba9-42d8-82b8-6399985f3af2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.065134] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1040.065134] env[68233]: value = "task-2782932" [ 1040.065134] env[68233]: _type = "Task" [ 1040.065134] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.072706] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782932, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.094744] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.350s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.097391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.082s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.139117] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.139445] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.139701] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleting the datastore file [datastore2] 2812bf7c-5117-4fd9-9330-0cc94277bf5d {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.143389] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e4483d1-cc1b-455e-a834-4e0c21b06a0b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.150801] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1040.150801] env[68233]: value = "task-2782933" [ 1040.150801] env[68233]: _type = "Task" [ 1040.150801] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.160083] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.288239] env[68233]: DEBUG nova.network.neutron [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.371905] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355b4915-d56f-4440-94de-fa039a53de86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.379628] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0388a8-3e34-4fe0-bd1b-36d694d51d51 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.421773] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e224fbe-80d5-46a5-921b-940733625379 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.429682] env[68233]: DEBUG nova.network.neutron [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.435021] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6ebf38-69c5-4294-8860-911065d7ceb5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.447526] env[68233]: DEBUG nova.compute.provider_tree [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.575635] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782932, 'name': Rename_Task, 'duration_secs': 0.177313} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.575920] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.576186] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5572f146-62c7-4b52-af9f-532b494046c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.582494] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1040.582494] env[68233]: value = "task-2782934" [ 1040.582494] env[68233]: _type = "Task" [ 1040.582494] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.591710] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.606096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-62f0f658-8f5e-435f-b2bd-9b4d7486eed7 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 48.046s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.606969] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 24.618s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.607232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.607454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.607621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.609365] env[68233]: INFO nova.compute.manager [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Terminating instance [ 1040.660044] env[68233]: DEBUG oslo_vmware.api [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234895} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.660357] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.660545] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1040.660724] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1040.660898] env[68233]: INFO nova.compute.manager [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1040.661158] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.661350] env[68233]: DEBUG nova.compute.manager [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1040.661441] env[68233]: DEBUG nova.network.neutron [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1040.938441] env[68233]: DEBUG oslo_concurrency.lockutils [req-c32dfbee-bbd7-414c-a730-a5565d0853fc req-2d88d754-0ffe-455f-ac31-3976bc994784 service nova] Releasing lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.939287] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.939671] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.951577] env[68233]: DEBUG nova.scheduler.client.report [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.093082] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782934, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.113920] env[68233]: DEBUG nova.compute.manager [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1041.114219] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1041.114550] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2df38fda-d9f2-4f26-9e18-b850f49f245b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.125088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f962cee-aced-46b5-a04e-fbc3b22a491e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.158946] env[68233]: WARNING nova.virt.vmwareapi.vmops [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d4b69710-7f74-4755-8783-63e36c67f57a could not be found. [ 1041.159175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.159362] env[68233]: INFO nova.compute.manager [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1041.159614] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.159864] env[68233]: DEBUG nova.compute.manager [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.159980] env[68233]: DEBUG nova.network.neutron [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.459077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.362s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.459322] env[68233]: INFO nova.compute.manager [None req-1f7a84a2-39be-462f-8e43-76762536ce31 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Successfully reverted task state from rebuilding on failure for instance. [ 1041.466951] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.223s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.470430] env[68233]: DEBUG nova.objects.instance [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lazy-loading 'resources' on Instance uuid 287df4d5-4e98-464d-8f0a-4571c1e4df4f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.486321] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.535764] env[68233]: DEBUG nova.network.neutron [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.593890] env[68233]: DEBUG oslo_vmware.api [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782934, 'name': PowerOnVM_Task, 'duration_secs': 0.643108} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.598026] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.598026] env[68233]: INFO nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Took 11.11 seconds to spawn the instance on the hypervisor. [ 1041.598026] env[68233]: DEBUG nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.598026] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2497986-d8dd-489a-9c48-36de615c9177 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.635127] env[68233]: DEBUG nova.network.neutron [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Updating instance_info_cache with network_info: [{"id": "ab615646-115b-4015-a2c3-db87dc950fcf", "address": "fa:16:3e:e0:2d:90", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab615646-11", "ovs_interfaceid": "ab615646-115b-4015-a2c3-db87dc950fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.767604] env[68233]: DEBUG nova.compute.manager [req-d0f9eef3-2f7c-47e2-b77f-b4f6b91615bb req-666f40ef-fdd5-4a68-a966-ef7d32b37679 service nova] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Received event network-vif-deleted-757e5be0-4fd2-40d3-b5fd-a6667126afc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1042.045086] env[68233]: INFO nova.compute.manager [-] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Took 1.38 seconds to deallocate network for instance. 
[ 1042.046788] env[68233]: DEBUG nova.network.neutron [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.122398] env[68233]: INFO nova.compute.manager [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Took 31.11 seconds to build instance. [ 1042.138298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "refresh_cache-9b7df182-5830-45a2-b50d-b3564a7e0b6c" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.138407] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance network_info: |[{"id": "ab615646-115b-4015-a2c3-db87dc950fcf", "address": "fa:16:3e:e0:2d:90", "network": {"id": "5ac85be5-912a-40f7-ab75-c8f92fd03aea", "bridge": "br-int", "label": "tempest-ServersTestJSON-1005153090-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3636e6c8e70e4996ac83a672732a1ff6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7aa6264-122d-4c35-82d0-860e451538ea", "external-id": "nsx-vlan-transportzone-81", "segmentation_id": 81, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab615646-11", "ovs_interfaceid": "ab615646-115b-4015-a2c3-db87dc950fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1042.138786] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:2d:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7aa6264-122d-4c35-82d0-860e451538ea', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab615646-115b-4015-a2c3-db87dc950fcf', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.147362] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1042.150585] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.151276] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36351360-8818-4877-aff6-cbecc567c6b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.174331] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.174331] env[68233]: value = "task-2782935" [ 1042.174331] env[68233]: _type = "Task" [ 1042.174331] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.183165] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782935, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.244879] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58184940-0e7d-4ff4-a9b3-4c85d43bd4e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.252988] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82463b0-3274-4fec-ad14-c9beb84a262b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.282925] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe7e17b-f081-4561-af55-c1a34c77d4e7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.290523] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b841e9ea-c2a4-4fdd-9116-a43e0aa59662 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.305308] env[68233]: DEBUG nova.compute.provider_tree [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.552534] env[68233]: INFO nova.compute.manager [-] [instance: d4b69710-7f74-4755-8783-63e36c67f57a] Took 1.39 seconds to deallocate network for instance. 
[ 1042.557704] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.628305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a2d487e3-a3f6-4de7-8da7-f679d2f0fbfb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.629s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.684319] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782935, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.808758] env[68233]: DEBUG nova.scheduler.client.report [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1042.983261] env[68233]: INFO nova.compute.manager [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Rebuilding instance [ 1043.024243] env[68233]: DEBUG nova.compute.manager [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.025105] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51060eb6-190f-4b59-827c-ad140f2d08c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.185617] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782935, 'name': CreateVM_Task, 'duration_secs': 0.748228} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.186132] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1043.186554] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.186749] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.187101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1043.187374] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28be67aa-9208-4174-90fd-f3086e7e827e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.192123] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1043.192123] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d4bb19-cd80-1c5f-2a1a-9a43ab62366e" [ 1043.192123] env[68233]: _type = "Task" [ 1043.192123] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.200599] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d4bb19-cd80-1c5f-2a1a-9a43ab62366e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.313385] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.315810] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.732s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1043.316064] env[68233]: DEBUG nova.objects.instance [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'resources' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.584263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-514873fd-6095-4027-b717-b719e9f047be tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "d4b69710-7f74-4755-8783-63e36c67f57a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.977s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.702500] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d4bb19-cd80-1c5f-2a1a-9a43ab62366e, 'name': SearchDatastore_Task, 'duration_secs': 0.011744} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.702798] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.703040] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1043.703315] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.703470] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.703651] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1043.703909] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9b73bd3-5f05-461c-9e20-81217d099178 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.712409] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1043.712598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1043.713345] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eed216ec-215e-49d6-8959-e5c709503e7e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.720535] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1043.720535] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216dd2c-edb1-e398-0945-4d29ca4f9dda" [ 1043.720535] env[68233]: _type = "Task" [ 1043.720535] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.728146] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216dd2c-edb1-e398-0945-4d29ca4f9dda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.818470] env[68233]: DEBUG nova.objects.instance [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'numa_topology' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1043.834795] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e495841f-0dd8-4160-aac2-df17fd621ae6 tempest-ServerActionsV293TestJSON-1415907044 tempest-ServerActionsV293TestJSON-1415907044-project-member] Lock "287df4d5-4e98-464d-8f0a-4571c1e4df4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.718s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.038594] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1044.038951] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f83a2d19-f749-45c6-b369-e37edb668fb4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.046720] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1044.046720] env[68233]: value = "task-2782936" [ 1044.046720] env[68233]: _type = "Task" [ 1044.046720] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.054562] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782936, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.231467] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5216dd2c-edb1-e398-0945-4d29ca4f9dda, 'name': SearchDatastore_Task, 'duration_secs': 0.01188} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.232487] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5edb34d8-797f-4cfd-9a47-35448f78d79c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.238951] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1044.238951] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b58cba-4191-8639-a522-be6de8ac16c9" [ 1044.238951] env[68233]: _type = "Task" [ 1044.238951] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.246829] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b58cba-4191-8639-a522-be6de8ac16c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.321698] env[68233]: DEBUG nova.objects.base [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Object Instance<62cd066c-5eac-4f07-bf4e-9275fedc7384> lazy-loaded attributes: resources,numa_topology {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1044.556514] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782936, 'name': PowerOffVM_Task, 'duration_secs': 0.350175} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.557611] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1044.557888] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1044.558636] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd45f67-e02e-4178-8e2e-9c86632ed638 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.561633] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a26d659-f6ec-495f-803b-8f2e18156308 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.568329] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1044.569741] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7dd805a5-eaa7-491f-a7ed-94ae19bde299 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.571767] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c2cf03-b7c9-48ab-a007-bca721beb2ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.603176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3870bd8f-9441-4781-afc3-12572825fd2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.610720] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dfc6048-ca15-4e03-8232-4d599f08b065 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.624539] env[68233]: DEBUG nova.compute.provider_tree [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1044.749738] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b58cba-4191-8639-a522-be6de8ac16c9, 'name': SearchDatastore_Task, 'duration_secs': 0.020034} completed 
successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.750040] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.750311] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9b7df182-5830-45a2-b50d-b3564a7e0b6c/9b7df182-5830-45a2-b50d-b3564a7e0b6c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1044.750567] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c68eb71d-bcd3-4f40-a2b0-fd489256382b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.757067] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1044.757067] env[68233]: value = "task-2782938" [ 1044.757067] env[68233]: _type = "Task" [ 1044.757067] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.764838] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782938, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.782859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.783132] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.847570] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.847841] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.848059] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.848320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.848444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.851629] env[68233]: INFO nova.compute.manager [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 
tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Terminating instance [ 1045.128539] env[68233]: DEBUG nova.scheduler.client.report [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1045.268050] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782938, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.285792] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1045.358455] env[68233]: DEBUG nova.compute.manager [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1045.358817] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.360378] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0eb9771-59d6-4edf-a8ae-20bd404e64f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.370344] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.370785] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-446b366b-327d-4c7f-8ed0-96988030816e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.377338] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1045.377338] env[68233]: value = "task-2782939" [ 1045.377338] env[68233]: _type = "Task" [ 1045.377338] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.387641] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.633915] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.318s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.636525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.289s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.638024] env[68233]: INFO nova.compute.claims [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.767115] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.965522} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.767391] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9b7df182-5830-45a2-b50d-b3564a7e0b6c/9b7df182-5830-45a2-b50d-b3564a7e0b6c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1045.767610] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1045.767859] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2aef8ac4-70b8-4e74-918d-4493f790a99d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.774855] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1045.774855] env[68233]: value = "task-2782940" [ 1045.774855] env[68233]: _type = "Task" [ 1045.774855] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.780856] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1045.781175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1045.781377] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1045.785710] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50676db8-619e-4325-b175-4d6756fc8732 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.787552] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782940, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.795937] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1045.795937] env[68233]: value = "task-2782941" [ 1045.795937] env[68233]: _type = "Task" [ 1045.795937] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.804096] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782941, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.809114] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.887270] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782939, 'name': PowerOffVM_Task, 'duration_secs': 0.445257} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.887545] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1045.887711] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1045.887956] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38c1b084-9443-4dfe-9337-924ba37eccbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.148508] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ce794f1a-9f78-450e-be97-2da51471cef9 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 41.089s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.149696] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 16.962s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.150031] env[68233]: INFO nova.compute.manager [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Unshelving [ 1046.284297] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782940, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.235946} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.284589] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.285352] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073b7331-e787-42fb-93e5-3ae37e2d2267 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.307803] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Reconfiguring VM instance instance-00000064 to attach disk [datastore2] 9b7df182-5830-45a2-b50d-b3564a7e0b6c/9b7df182-5830-45a2-b50d-b3564a7e0b6c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1046.311085] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a1bdb6c-73a7-4d5e-870b-dfddf126eba7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.329642] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.244879} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.330849] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.331055] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.331241] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.333710] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1046.333710] env[68233]: value = "task-2782943" [ 1046.333710] env[68233]: _type = "Task" [ 1046.333710] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.341834] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782943, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.853724] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782943, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.881947] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b8e628-ae4a-472d-8cbf-e4b4f887e9ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.890599] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44abcbe4-bd7a-4ff6-9140-caaf1232f2e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.921615] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4339a6fb-bf39-4b02-9567-0dc03e8a6a95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.929176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d492fa0-b419-4657-94ad-d57aa9d3975b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.942607] env[68233]: DEBUG nova.compute.provider_tree [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.159301] env[68233]: DEBUG nova.compute.utils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1047.352061] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782943, 'name': ReconfigVM_Task, 'duration_secs': 0.892065} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.352403] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Reconfigured VM instance instance-00000064 to attach disk [datastore2] 9b7df182-5830-45a2-b50d-b3564a7e0b6c/9b7df182-5830-45a2-b50d-b3564a7e0b6c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1047.353150] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdb32fb1-0c73-4d0c-8fd8-570089501ae2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.361899] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1047.361899] env[68233]: value = "task-2782944" [ 1047.361899] env[68233]: _type = "Task" [ 1047.361899] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.368076] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782944, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.377580] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1047.377869] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1047.378137] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1047.378439] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 
tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1047.378669] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1047.378901] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1047.379249] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1047.379496] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1047.379745] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1047.379990] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1047.380299] env[68233]: DEBUG nova.virt.hardware [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1047.381615] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a65ab26-b25f-46b2-ad1d-078bfc7d2220 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.390934] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c33907-1f01-48c6-a7e1-530f9e361713 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.397895] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
1047.398118] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1047.398295] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleting the datastore file [datastore2] dca145c8-ed95-4dfb-9534-37035c75dafb {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1047.398950] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ae9c837-769e-41ff-b9f5-a088344b6584 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.408675] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:d5:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dafc44e7-03ce-48e2-b3a5-9f255fda5098', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1047.416098] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.417419] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1047.417737] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for the task: (returnval){ [ 1047.417737] env[68233]: value = "task-2782945" [ 1047.417737] env[68233]: _type = "Task" [ 1047.417737] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.417923] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c28b93c-cd74-4fd3-bce4-acf5f14f08c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.441007] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782945, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.442251] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1047.442251] env[68233]: value = "task-2782946" [ 1047.442251] env[68233]: _type = "Task" [ 1047.442251] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.445862] env[68233]: DEBUG nova.scheduler.client.report [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1047.453880] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782946, 'name': CreateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.662592] env[68233]: INFO nova.virt.block_device [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Booting with volume 134bd8db-5ff7-4467-9f8a-c0e2aa619b91 at /dev/sdb [ 1047.698259] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6854aeb-aeee-4725-ae6e-79bf9fb93521 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.709464] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edbfb30-38f5-47ed-ae0b-225db38843d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.738845] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-721f4690-d292-4baf-8d2f-a4e2b7207bef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.747170] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea056aa-1b82-46ce-99c2-11802023714c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.775457] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8073bb-04ef-4f11-b7d3-c9fa2db6d889 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.781557] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1832f540-f101-44be-8daa-155fef851dc7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.794378] env[68233]: DEBUG nova.virt.block_device [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 
tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating existing volume attachment record: dff69871-2dc6-480f-a72c-e2081b60caf0 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1047.870553] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782944, 'name': Rename_Task, 'duration_secs': 0.181861} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.870809] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.871056] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48987eb8-be00-4f57-b698-2aedcc60cd89 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.877551] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1047.877551] env[68233]: value = "task-2782947" [ 1047.877551] env[68233]: _type = "Task" [ 1047.877551] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.884805] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.942319] env[68233]: DEBUG oslo_vmware.api [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Task: {'id': task-2782945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231083} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.942717] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1047.942931] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1047.943131] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1047.943329] env[68233]: INFO nova.compute.manager [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Took 2.58 seconds to destroy the instance on the hypervisor. [ 1047.943559] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1047.946695] env[68233]: DEBUG nova.compute.manager [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1047.946804] env[68233]: DEBUG nova.network.neutron [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1047.953175] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.953634] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1047.956645] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782946, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.957018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.476s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.957256] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.957417] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1047.957685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.933s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.957885] env[68233]: DEBUG nova.objects.instance [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lazy-loading 'resources' on Instance uuid 827711ac-ef52-41a0-9029-0a1805522a08 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1047.959831] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2378f0f-c930-47a0-a967-28db4c036188 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.969048] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbdd326-6547-49f2-99b3-af08dd4adea7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.984111] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ff5242-e867-4795-a22b-76f2c545707c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.991300] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc41de5-b4be-490b-8120-c3d3c1a5d386 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.022550] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179077MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1048.022741] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.283273] env[68233]: DEBUG nova.compute.manager [req-2270ff32-2588-41c3-bba8-3c79756957a5 req-12c34411-5f91-466b-a4c1-1528dafd2262 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Received event network-vif-deleted-d9478083-21a3-4b61-ab65-e1281b8bac7b {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1048.283273] env[68233]: INFO nova.compute.manager [req-2270ff32-2588-41c3-bba8-3c79756957a5 req-12c34411-5f91-466b-a4c1-1528dafd2262 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Neutron deleted interface d9478083-21a3-4b61-ab65-e1281b8bac7b; detaching it from the instance and deleting it from the info cache [ 1048.283273] env[68233]: DEBUG nova.network.neutron [req-2270ff32-2588-41c3-bba8-3c79756957a5 req-12c34411-5f91-466b-a4c1-1528dafd2262 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.387804] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782947, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.452391] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782946, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.457894] env[68233]: DEBUG nova.compute.utils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1048.459463] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1048.459666] env[68233]: DEBUG nova.network.neutron [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1048.518138] env[68233]: DEBUG nova.policy [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e5641f486324f02b58e073cbb5f5035', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a56a8de59c31489da9e12518adfc6f46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1048.711061] env[68233]: DEBUG nova.network.neutron [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.724788] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb33ecc-8293-45ce-90e8-633ca27d0a6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.732877] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca96c57-4706-4a66-9293-36664b9999c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.765081] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cf1666-0a98-42b4-8d11-13decf71d7ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.772396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3717f644-3734-49ce-bdde-9440fdcfc6dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.786016] env[68233]: DEBUG nova.compute.provider_tree [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.787267] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93863361-46c3-4bf8-ac1a-e16e22b2f88c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.795996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ed9a16-6073-4cef-b63a-b4888ada276a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.808345] env[68233]: DEBUG nova.network.neutron 
[None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Successfully created port: 9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.830912] env[68233]: DEBUG nova.compute.manager [req-2270ff32-2588-41c3-bba8-3c79756957a5 req-12c34411-5f91-466b-a4c1-1528dafd2262 service nova] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Detach interface failed, port_id=d9478083-21a3-4b61-ab65-e1281b8bac7b, reason: Instance dca145c8-ed95-4dfb-9534-37035c75dafb could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1048.888059] env[68233]: DEBUG oslo_vmware.api [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782947, 'name': PowerOnVM_Task, 'duration_secs': 0.63769} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.888344] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.888547] env[68233]: INFO nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Took 11.09 seconds to spawn the instance on the hypervisor. [ 1048.888730] env[68233]: DEBUG nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1048.889498] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e34bff-a884-4e08-aa6a-cd0bb5ed0936 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.953011] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782946, 'name': CreateVM_Task, 'duration_secs': 1.458647} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.953228] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1048.953895] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.954072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.954413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1048.954719] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14078630-def4-40cb-a24e-fec9521f9a75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.958843] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1048.958843] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ae763f-7e5e-864c-8d18-881774900f57" [ 1048.958843] env[68233]: _type = "Task" [ 1048.958843] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.966343] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1048.968679] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ae763f-7e5e-864c-8d18-881774900f57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.216322] env[68233]: INFO nova.compute.manager [-] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Took 1.27 seconds to deallocate network for instance. 
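The records above trace the full teardown of instance dca145c8-ed95-4dfb-9534-37035c75dafb: PowerOffVM_Task, a synchronous UnregisterVM, DeleteDatastoreFile_Task for the instance directory, and finally network deallocation. Those vCenter calls are all driven through oslo.vmware. The following is a minimal illustrative sketch of that call pattern only, not Nova's actual vmops/vm_util code; it assumes an already-authenticated oslo_vmware.api.VMwareAPISession named "session", a VirtualMachine managed-object reference "vm_ref", and a datacenter reference, and the helper name destroy_vm is hypothetical.

# Illustrative sketch of the logged destroy flow (power off -> unregister ->
# delete datastore files). Assumes oslo.vmware is installed; "session", "vm_ref",
# "datastore_path" and "datacenter_ref" are placeholders supplied by the caller.
from oslo_vmware import exceptions as vexc


def destroy_vm(session, vm_ref, datastore_path, datacenter_ref):
    """Mirror the power-off / unregister / file-delete sequence seen in the log."""
    try:
        # PowerOffVM_Task is asynchronous; wait_for_task polls it, which is what
        # produces the "_poll_task ... progress is N%" lines above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)
    except vexc.VimException:
        # The VM may already be powered off; tolerate that, as the driver does.
        pass

    # UnregisterVM is a plain synchronous call, hence no task polling in the log.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Removing the instance directory (e.g. "[datastore2] <uuid>") is again a task
    # on the FileManager, matching the DeleteDatastoreFile_Task records.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=datastore_path,
                              datacenter=datacenter_ref)
    session.wait_for_task(task)

Each "Task: {'id': task-..., ...} progress is N%" line in the log corresponds to one poll of such a task by wait_for_task, and the "completed successfully" line is the poll that observes the task's final success state.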
[ 1049.290373] env[68233]: DEBUG nova.scheduler.client.report [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.409017] env[68233]: INFO nova.compute.manager [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Took 37.16 seconds to build instance. [ 1049.472660] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ae763f-7e5e-864c-8d18-881774900f57, 'name': SearchDatastore_Task, 'duration_secs': 0.009247} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.473532] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.473532] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.473532] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.473648] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.474274] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.474274] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ef1a373-c290-4e0e-857f-674fba7057ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.482933] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.483125] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.484045] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a6b7796-1218-4654-8607-2b3114767025 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.489649] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1049.489649] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52125ee3-b8ce-b530-afd9-e867cd0a0f25" [ 1049.489649] env[68233]: _type = "Task" [ 1049.489649] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.498478] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52125ee3-b8ce-b530-afd9-e867cd0a0f25, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.724231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.794854] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.799039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.692s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.799039] env[68233]: DEBUG nova.objects.instance [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'resources' on Instance uuid 4677d047-f8dc-4501-be9b-14e6a2222f46 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1049.816731] env[68233]: INFO nova.scheduler.client.report [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Deleted allocations for instance 827711ac-ef52-41a0-9029-0a1805522a08 [ 1049.910256] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4a7c2321-f1a8-413d-bd55-6af2b18d0f7c tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.679s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1049.975516] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1050.001707] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52125ee3-b8ce-b530-afd9-e867cd0a0f25, 'name': SearchDatastore_Task, 'duration_secs': 0.009229} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.003954] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1050.004193] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1050.004353] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1050.004565] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1050.004695] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1050.004844] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1050.005065] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1050.005233] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 
tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1050.005404] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1050.005567] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1050.005740] env[68233]: DEBUG nova.virt.hardware [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1050.006996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34e5f47-eb82-4005-9f7e-336d1b8ae0f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.009411] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8038d2b-1f5c-416b-b32e-72ab83ce42cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.016894] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adc80f0-ba65-439f-8e6f-4ce3adf0318a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.020698] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1050.020698] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a9d1b3-c924-4e44-4152-7c52b9a0ad93" [ 1050.020698] env[68233]: _type = "Task" [ 1050.020698] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.037205] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a9d1b3-c924-4e44-4152-7c52b9a0ad93, 'name': SearchDatastore_Task, 'duration_secs': 0.009906} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.037287] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.037513] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.037732] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12d85ff8-91aa-4693-98e7-ad73b0c3fb45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.043833] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1050.043833] env[68233]: value = "task-2782951" [ 1050.043833] env[68233]: _type = "Task" [ 1050.043833] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.051046] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782951, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.254772] env[68233]: DEBUG nova.network.neutron [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Successfully updated port: 9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1050.317116] env[68233]: DEBUG nova.compute.manager [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Received event network-vif-plugged-9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1050.317416] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Acquiring lock "19cf6f80-ff11-4881-896e-9fc162ded31e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.317557] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.318048] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.318048] env[68233]: DEBUG nova.compute.manager [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] No waiting events found dispatching network-vif-plugged-9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1050.318048] env[68233]: WARNING nova.compute.manager [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Received unexpected event network-vif-plugged-9ecf1241-bc53-44ab-938f-7ab979d37433 for instance with vm_state building and task_state spawning. [ 1050.318267] env[68233]: DEBUG nova.compute.manager [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Received event network-changed-9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1050.318395] env[68233]: DEBUG nova.compute.manager [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Refreshing instance network info cache due to event network-changed-9ecf1241-bc53-44ab-938f-7ab979d37433. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1050.318672] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Acquiring lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.318818] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Acquired lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.318977] env[68233]: DEBUG nova.network.neutron [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Refreshing network info cache for port 9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.329938] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a1d9f96-4f46-4ba6-b5bd-c961f516b1a2 tempest-VolumesAdminNegativeTest-1302465750 tempest-VolumesAdminNegativeTest-1302465750-project-member] Lock "827711ac-ef52-41a0-9029-0a1805522a08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.488s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.412584] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.412859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.413037] env[68233]: DEBUG nova.compute.manager [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.414282] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50e7240-21dc-4653-8332-d2d4275f35aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.421739] env[68233]: DEBUG nova.compute.manager [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1050.422306] env[68233]: DEBUG nova.objects.instance [None 
req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'flavor' on Instance uuid 9b7df182-5830-45a2-b50d-b3564a7e0b6c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.554781] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782951, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.433734} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.555539] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1050.556704] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1050.557558] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d34b947d-b207-458c-bd86-bb8b0366fa1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.560178] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e1b7f0-8819-4064-9064-078bf8e98659 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.568595] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd852d5-d4c8-4400-bbef-99fa2694e5aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.571941] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1050.571941] env[68233]: value = "task-2782953" [ 1050.571941] env[68233]: _type = "Task" [ 1050.571941] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.601203] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-602b3553-71c7-4737-9682-29ec64ab762a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.606769] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782953, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.611333] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35106e0-9d0b-45ea-84ed-3880400482b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.624265] env[68233]: DEBUG nova.compute.provider_tree [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1050.757755] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.850945] env[68233]: DEBUG nova.network.neutron [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1050.917864] env[68233]: DEBUG nova.network.neutron [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.087506] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07299} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.087805] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1051.089666] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3811068-505e-48ff-907f-feef71d2dd7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.118667] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.118999] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a235360-56ea-48fc-bc5b-714f105e3e3a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.136029] env[68233]: DEBUG nova.scheduler.client.report [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1051.144975] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1051.144975] env[68233]: value = "task-2782954" [ 1051.144975] env[68233]: _type = "Task" [ 1051.144975] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.155993] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782954, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.423085] env[68233]: DEBUG oslo_concurrency.lockutils [req-19d9f5c1-8544-4c34-b452-b64f8a6d22ec req-2c033a68-9705-4d24-b139-f34f74f6b758 service nova] Releasing lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.423085] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquired lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.423085] env[68233]: DEBUG nova.network.neutron [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1051.430348] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1051.430666] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5b31522e-fda2-4cb5-aaa3-3f0a84eeb23e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.438822] env[68233]: DEBUG oslo_vmware.api [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1051.438822] env[68233]: value = "task-2782955" [ 1051.438822] env[68233]: _type = "Task" [ 1051.438822] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.447560] env[68233]: DEBUG oslo_vmware.api [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782955, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.640995] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.844s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.643858] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.505s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.644132] env[68233]: DEBUG nova.objects.instance [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lazy-loading 'resources' on Instance uuid 4922985d-ad04-4c34-8dcb-6e6f8df94ff9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.661871] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782954, 'name': ReconfigVM_Task, 'duration_secs': 0.469902} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.662157] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Reconfigured VM instance instance-00000063 to attach disk [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2/e95e2309-1df5-466b-bb8a-0c9188dc07c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.662861] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d6c511d-368c-4ea1-816a-0b68caad5e8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.673713] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1051.673713] env[68233]: value = "task-2782956" [ 1051.673713] env[68233]: _type = "Task" [ 1051.673713] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.680791] env[68233]: INFO nova.scheduler.client.report [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted allocations for instance 4677d047-f8dc-4501-be9b-14e6a2222f46 [ 1051.692133] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782956, 'name': Rename_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.952233] env[68233]: DEBUG oslo_vmware.api [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782955, 'name': PowerOffVM_Task, 'duration_secs': 0.206407} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.952541] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.952740] env[68233]: DEBUG nova.compute.manager [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1051.953532] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53dfed6b-5799-473e-9c03-c43d77d66724 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.956783] env[68233]: DEBUG nova.network.neutron [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1052.165568] env[68233]: DEBUG nova.network.neutron [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Updating instance_info_cache with network_info: [{"id": "9ecf1241-bc53-44ab-938f-7ab979d37433", "address": "fa:16:3e:7a:af:10", "network": {"id": "862e9a6d-cdce-4180-a0ea-1a5f8328fc37", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1152649612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a56a8de59c31489da9e12518adfc6f46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecf1241-bc", "ovs_interfaceid": "9ecf1241-bc53-44ab-938f-7ab979d37433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.184065] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 
tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782956, 'name': Rename_Task, 'duration_secs': 0.152173} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.188370] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.189159] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75554fee-69da-47be-a7c3-9a455bb3309d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.191184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-72590c0c-3d46-45d0-acec-0b9f75bd0dc1 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4677d047-f8dc-4501-be9b-14e6a2222f46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.359s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.198575] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1052.198575] env[68233]: value = "task-2782957" [ 1052.198575] env[68233]: _type = "Task" [ 1052.198575] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.215254] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782957, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.386041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02684282-448a-47bb-965c-2964d84d24f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.394492] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de0d30e-6e10-4dcc-8aa4-eb87ca6c57b4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.427619] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ce1db3-f97a-43d9-b325-e99d5a6c3629 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.436032] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa868f14-d21e-4793-9de7-0737a6443dcd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.449997] env[68233]: DEBUG nova.compute.provider_tree [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.467829] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f412b50-2f51-4d7e-aefe-b5da95413cb0 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.054s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.606914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.607042] env[68233]: DEBUG oslo_concurrency.lockutils [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.607401] env[68233]: DEBUG nova.objects.instance [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.670125] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 
tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Releasing lock "refresh_cache-19cf6f80-ff11-4881-896e-9fc162ded31e" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.670528] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance network_info: |[{"id": "9ecf1241-bc53-44ab-938f-7ab979d37433", "address": "fa:16:3e:7a:af:10", "network": {"id": "862e9a6d-cdce-4180-a0ea-1a5f8328fc37", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1152649612-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a56a8de59c31489da9e12518adfc6f46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "60badc2d-69d2-467d-a92e-98511f5cb0b2", "external-id": "cl2-zone-408", "segmentation_id": 408, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9ecf1241-bc", "ovs_interfaceid": "9ecf1241-bc53-44ab-938f-7ab979d37433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1052.671542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:af:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '60badc2d-69d2-467d-a92e-98511f5cb0b2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9ecf1241-bc53-44ab-938f-7ab979d37433', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1052.680208] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Creating folder: Project (a56a8de59c31489da9e12518adfc6f46). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1052.680519] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6816b253-dcbd-4b53-a43d-9fb1b4948976 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.695130] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Created folder: Project (a56a8de59c31489da9e12518adfc6f46) in parent group-v559223. 
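
The folder-creation and disk-copy entries above ("Invoking FileManager.MakeDirectory ...", "Waiting for the task: (returnval){ value = "task-..." }", "progress is N%", "completed successfully") all come from the same oslo.vmware call pattern: a SOAP method is invoked through the API session, task-returning methods hand back a Task managed-object reference, and the session polls that task until it reaches a terminal state. The snippet below is a minimal illustrative sketch of that pattern, not the Nova code that emitted these lines; the vCenter host, credentials, and datastore path are placeholders.

    # Sketch only: the oslo.vmware invoke/poll pattern behind the log entries above.
    # Host, credentials and the datastore path are placeholders, not values from this run.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # "Invoking FileManager.MakeDirectory ..." -- a plain (non-task) SOAP call
    # issued through the session against the FileManager managed object.
    file_manager = session.vim.service_content.fileManager
    session.invoke_api(
        session.vim, 'MakeDirectory', file_manager,
        name='[datastore2] devstack-image-cache_base',
        createParentDirectories=True)

    # Task-returning calls (SearchDatastore_Task, CopyVirtualDisk_Task,
    # ExtendVirtualDisk_Task, CreateVM_Task, ...) return a Task moref instead;
    # wait_for_task() polls it -- the "progress is N%" lines -- and returns the
    # task info once it has "completed successfully", or raises on error.
    # task_ref = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', ...)
    # task_info = session.wait_for_task(task_ref)
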
[ 1052.695438] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Creating folder: Instances. Parent ref: group-v559491. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1052.695721] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c045f95-6c03-40ce-bcc8-8480c27b19b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.709942] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782957, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.711392] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Created folder: Instances in parent group-v559491. [ 1052.711626] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.711816] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1052.712052] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9299ec63-d729-4dcb-8e9c-58550e0790a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.744249] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1052.744249] env[68233]: value = "task-2782960" [ 1052.744249] env[68233]: _type = "Task" [ 1052.744249] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.754378] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782960, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.953424] env[68233]: DEBUG nova.scheduler.client.report [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1053.114025] env[68233]: DEBUG nova.objects.instance [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.210975] env[68233]: DEBUG oslo_vmware.api [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782957, 'name': PowerOnVM_Task, 'duration_secs': 0.573177} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.211410] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.211759] env[68233]: DEBUG nova.compute.manager [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.213025] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ea4105-3809-44a6-91b5-07f3e63b19da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.255284] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782960, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.305973] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.307158] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.307433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.307639] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.308227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.310754] env[68233]: INFO nova.compute.manager [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Terminating instance [ 1053.459066] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.814s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.461205] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.903s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.461205] env[68233]: DEBUG nova.objects.instance [None 
req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'resources' on Instance uuid 2812bf7c-5117-4fd9-9330-0cc94277bf5d {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1053.485593] env[68233]: INFO nova.scheduler.client.report [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted allocations for instance 4922985d-ad04-4c34-8dcb-6e6f8df94ff9 [ 1053.621009] env[68233]: DEBUG nova.objects.base [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<0bde10dc-6762-49fb-9c0d-6b104a3cfa39> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1053.621250] env[68233]: DEBUG nova.network.neutron [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1053.730513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.753488] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782960, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.764763] env[68233]: DEBUG oslo_concurrency.lockutils [None req-886c2b3d-d737-43d1-ba03-5d64171dd0a1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.158s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.817156] env[68233]: DEBUG nova.compute.manager [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1053.817396] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1053.820263] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe2035d-217e-4b0d-bbf0-b98b99f6c454 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.828047] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1053.828251] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-589810c1-2f27-4518-bdaf-43dab9c448d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.904544] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.996061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a5740e3f-6931-4cc5-99d7-9a2ee6851a81 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "4922985d-ad04-4c34-8dcb-6e6f8df94ff9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.468s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.168418] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e841066-9e27-4e17-b72f-5cf99af53287 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.177101] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006b64d4-41d8-4b03-887e-7066247077cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.212653] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990308e4-bbe1-4b32-b588-316f6aa9fb70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.221457] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6bd863-563d-41e7-90f8-43082a624f5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.236199] env[68233]: DEBUG nova.compute.provider_tree [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed in ProviderTree for 
provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.257039] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782960, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.373788] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1054.373788] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1054.373788] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 9b7df182-5830-45a2-b50d-b3564a7e0b6c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1054.373788] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1069b4c6-d3ca-4921-81a0-9d113bfcd62a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.379718] env[68233]: DEBUG oslo_vmware.api [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1054.379718] env[68233]: value = "task-2782962" [ 1054.379718] env[68233]: _type = "Task" [ 1054.379718] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.390249] env[68233]: DEBUG oslo_vmware.api [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.739854] env[68233]: DEBUG nova.scheduler.client.report [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.754457] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782960, 'name': CreateVM_Task, 'duration_secs': 1.622232} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.755261] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1054.755955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.756139] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.756461] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1054.756965] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faaaeaf8-9492-45dc-abf0-68ace181c928 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.763794] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1054.763794] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f54c98-41b7-cbae-3034-6a887b460162" [ 1054.763794] env[68233]: _type = "Task" [ 1054.763794] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.776323] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f54c98-41b7-cbae-3034-6a887b460162, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.890124] env[68233]: DEBUG oslo_vmware.api [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170618} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.890283] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.891027] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1054.891027] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1054.891027] env[68233]: INFO nova.compute.manager [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1054.891258] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.891404] env[68233]: DEBUG nova.compute.manager [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1054.891502] env[68233]: DEBUG nova.network.neutron [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1055.221163] env[68233]: DEBUG nova.compute.manager [req-c6485638-6f83-4ac3-a74a-d76561f21dd2 req-255a43fc-a133-4ed8-85d4-6ba6a7e45853 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Received event network-vif-deleted-ab615646-115b-4015-a2c3-db87dc950fcf {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1055.221384] env[68233]: INFO nova.compute.manager [req-c6485638-6f83-4ac3-a74a-d76561f21dd2 req-255a43fc-a133-4ed8-85d4-6ba6a7e45853 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Neutron deleted interface ab615646-115b-4015-a2c3-db87dc950fcf; detaching it from the instance and deleting it from the info cache [ 1055.221617] env[68233]: DEBUG nova.network.neutron [req-c6485638-6f83-4ac3-a74a-d76561f21dd2 req-255a43fc-a133-4ed8-85d4-6ba6a7e45853 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.244922] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.784s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.247896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.439s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.249851] env[68233]: INFO nova.compute.claims [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1055.275830] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f54c98-41b7-cbae-3034-6a887b460162, 'name': SearchDatastore_Task, 'duration_secs': 0.010617} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.276167] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1055.276402] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1055.276706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.276785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.276969] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1055.277255] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb7b0de8-d9bc-4358-9546-61d0c4f5c508 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.287076] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1055.287266] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1055.288355] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91d5391b-e376-4f18-8317-de5a371dc90a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.296037] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1055.296037] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f9a266-42e2-6588-280f-eca561afaa1e" [ 1055.296037] env[68233]: _type = "Task" [ 1055.296037] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.304466] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f9a266-42e2-6588-280f-eca561afaa1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.309057] env[68233]: INFO nova.scheduler.client.report [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted allocations for instance 2812bf7c-5117-4fd9-9330-0cc94277bf5d [ 1055.548891] env[68233]: DEBUG nova.compute.manager [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1055.561930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.562279] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.562524] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.562751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.562952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.565262] env[68233]: INFO nova.compute.manager [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Terminating instance [ 1055.628346] env[68233]: DEBUG nova.network.neutron [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1055.674580] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.674580] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.674580] env[68233]: DEBUG nova.objects.instance [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.725469] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cc0f32f8-ebe0-4bf3-8155-1bcdea44203a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.742058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7608ef9-d3e0-43f2-b7e0-8944dabdd60b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.773594] env[68233]: DEBUG nova.compute.manager [req-c6485638-6f83-4ac3-a74a-d76561f21dd2 req-255a43fc-a133-4ed8-85d4-6ba6a7e45853 service nova] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Detach interface failed, port_id=ab615646-115b-4015-a2c3-db87dc950fcf, reason: Instance 9b7df182-5830-45a2-b50d-b3564a7e0b6c could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1055.807916] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f9a266-42e2-6588-280f-eca561afaa1e, 'name': SearchDatastore_Task, 'duration_secs': 0.011828} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.810092] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb63b86-85db-4a73-bdd2-ea7281673bec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.818032] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1055.818032] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1f918-7e90-ae00-2814-540dcf6d635c" [ 1055.818032] env[68233]: _type = "Task" [ 1055.818032] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.821675] env[68233]: DEBUG oslo_concurrency.lockutils [None req-348fa564-5176-4c85-bf70-8c157cd47d6e tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "2812bf7c-5117-4fd9-9330-0cc94277bf5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.871s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.829437] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1f918-7e90-ae00-2814-540dcf6d635c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.070255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.070962] env[68233]: DEBUG nova.compute.manager [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.071183] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.072060] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab03739-b3d8-48e3-b636-76bf8fd58ab0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.080127] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.080471] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62c471a2-a44f-4fa2-ad0b-63bb2789c1af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.089058] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1056.089058] env[68233]: value = "task-2782963" [ 1056.089058] env[68233]: _type = "Task" [ 1056.089058] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.108239] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782963, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.131069] env[68233]: INFO nova.compute.manager [-] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Took 1.24 seconds to deallocate network for instance. 
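Side note (editorial, not part of the log): the recurring "Task: {'id': task-..., 'name': ...} progress is N%" entries above come from oslo.vmware polling asynchronous vCenter tasks until they finish. A minimal sketch of that pattern, with placeholder host, credentials and managed-object IDs (this is not Nova's actual code):

    from oslo_vmware import api, vim_util

    # Placeholder connection details; Nova builds this session from its [vmware] config.
    session = api.VMwareAPISession(
        'vc1.example.test',
        'administrator@vsphere.local',
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5)   # seconds between the "progress is N%" polls

    # Asynchronous vSphere calls return a Task managed object reference.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')   # placeholder moref
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)

    # wait_for_task() re-reads task.info on each poll interval, logging the
    # progress seen above, and returns task.info.result on success or raises
    # on error/cancellation.
    result = session.wait_for_task(task)

The opID=oslo.vmware-... values in the "Invoking ..." lines are the per-request operation IDs the session attaches to each SOAP call so requests can be correlated with the vCenter side.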
[ 1056.219765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.220096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.294150] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "87385201-3118-4a8e-9739-db3b431566c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.294594] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.294911] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "87385201-3118-4a8e-9739-db3b431566c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.295125] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.295299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.304907] env[68233]: INFO nova.compute.manager [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 
87385201-3118-4a8e-9739-db3b431566c5] Terminating instance [ 1056.311372] env[68233]: DEBUG nova.objects.instance [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1056.333727] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1f918-7e90-ae00-2814-540dcf6d635c, 'name': SearchDatastore_Task, 'duration_secs': 0.012555} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.334009] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.334328] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e/19cf6f80-ff11-4881-896e-9fc162ded31e.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1056.334613] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0ba4e7a4-bf26-4bd4-acff-4ae1b3efd693 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.342234] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1056.342234] env[68233]: value = "task-2782964" [ 1056.342234] env[68233]: _type = "Task" [ 1056.342234] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.352137] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.602090] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782963, 'name': PowerOffVM_Task, 'duration_secs': 0.262679} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.602409] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.603224] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.603224] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44531220-190a-4518-87bc-3b369f8d08ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.614294] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d699cb-08c0-4d6d-9998-b33cb99f7ff1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.630812] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaf55d1-aa6a-4ad0-ab73-a8d9e0cc5373 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.642651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.680319] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e731f46-90fb-4df1-b33b-7dc796d5ea0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.688900] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6a1694-4aec-4b0e-9668-06d714817477 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.706750] env[68233]: DEBUG nova.compute.provider_tree [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.710297] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1056.710541] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 
e95e2309-1df5-466b-bb8a-0c9188dc07c2] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1056.710771] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] e95e2309-1df5-466b-bb8a-0c9188dc07c2 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1056.710997] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a0af3ec-f478-4429-b4aa-87c545e3b584 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.720458] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1056.720458] env[68233]: value = "task-2782966" [ 1056.720458] env[68233]: _type = "Task" [ 1056.720458] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.730859] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1056.743644] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.813357] env[68233]: DEBUG nova.compute.manager [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.815300] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.815300] env[68233]: DEBUG nova.objects.base [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<0bde10dc-6762-49fb-9c0d-6b104a3cfa39> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1056.815300] env[68233]: DEBUG nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1056.817812] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b18bd0b6-bf42-4f92-8bea-a7b39aa4a259 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.828049] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.828336] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4b38f4d-4907-4c98-8207-6ef6e0140b56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.838918] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1056.838918] env[68233]: value = "task-2782967" [ 1056.838918] env[68233]: _type = "Task" [ 1056.838918] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.855025] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.857148] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782964, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.111637] env[68233]: DEBUG nova.policy [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1057.211748] env[68233]: DEBUG nova.scheduler.client.report [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1057.233095] env[68233]: DEBUG oslo_vmware.api [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2782966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.373885} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.233095] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.233095] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.233095] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.233095] env[68233]: INFO nova.compute.manager [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Took 1.16 seconds to destroy the instance on the hypervisor. 
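Side note (editorial, not part of the log): the inventory record the scheduler report client logs above is what Placement uses to size this provider. A minimal worked example of that arithmetic, using the figures from the log (usable capacity = (total - reserved) * allocation_ratio, with max_unit capping what any single allocation may claim); this reflects Placement's documented capacity model, not code lifted from this deployment:

    # Inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c, as logged above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: schedulable={capacity}, per-instance max_unit={inv['max_unit']}")

    # VCPU: schedulable=192, per-instance max_unit=16
    # MEMORY_MB: schedulable=196078, per-instance max_unit=65530
    # DISK_GB: schedulable=400, per-instance max_unit=175

With a 4.0 VCPU allocation ratio the 48 host cores are overcommitted to 192 schedulable vCPUs, while memory and disk on this node are not overcommitted.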
[ 1057.233543] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.233543] env[68233]: DEBUG nova.compute.manager [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.233543] env[68233]: DEBUG nova.network.neutron [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.263952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.353776] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57115} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.356709] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e/19cf6f80-ff11-4881-896e-9fc162ded31e.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1057.357122] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1057.357397] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782967, 'name': PowerOffVM_Task, 'duration_secs': 0.342994} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.357600] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06fcf42f-224f-4075-a5ea-612a3b405c33 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.359767] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.359942] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.360350] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ebef07d-7214-43d3-828a-bfafc6d80d8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.367424] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1057.367424] env[68233]: value = "task-2782968" [ 1057.367424] env[68233]: _type = "Task" [ 1057.367424] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.378308] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782968, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.580751] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.581065] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.581282] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.581474] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.581647] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.583820] env[68233]: INFO nova.compute.manager [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Terminating instance [ 1057.586844] env[68233]: DEBUG nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Successfully created port: ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1057.716895] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.717626] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1057.720294] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.698s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.788645] env[68233]: DEBUG nova.compute.manager [req-45c1e3c2-7f0a-434c-9504-606f09c3bbd1 req-0bb9b5b8-fff2-45a3-849f-811d704b3f1d service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Received event network-vif-deleted-dafc44e7-03ce-48e2-b3a5-9f255fda5098 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1057.788645] env[68233]: INFO nova.compute.manager [req-45c1e3c2-7f0a-434c-9504-606f09c3bbd1 req-0bb9b5b8-fff2-45a3-849f-811d704b3f1d service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Neutron deleted interface dafc44e7-03ce-48e2-b3a5-9f255fda5098; detaching it from the instance and deleting it from the info cache [ 1057.788645] env[68233]: DEBUG nova.network.neutron [req-45c1e3c2-7f0a-434c-9504-606f09c3bbd1 req-0bb9b5b8-fff2-45a3-849f-811d704b3f1d service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.881577] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782968, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091213} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.881867] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1057.883242] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5db594-e921-4af1-82d3-91dc2daa7aaa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.910099] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Reconfiguring VM instance instance-00000065 to attach disk [datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e/19cf6f80-ff11-4881-896e-9fc162ded31e.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1057.910467] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b017c5d7-b45d-4fe0-b42b-f076cb8f652d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.934305] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1057.934305] env[68233]: value = "task-2782970" [ 1057.934305] env[68233]: _type = "Task" [ 1057.934305] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.945474] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782970, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.954990] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.955409] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.955722] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleting the datastore file [datastore2] 87385201-3118-4a8e-9739-db3b431566c5 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.958814] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fed0e0a-c25b-4ebe-a2d6-83f447b55b67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.968610] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for the task: (returnval){ [ 1057.968610] env[68233]: value = "task-2782971" [ 1057.968610] env[68233]: _type = "Task" [ 1057.968610] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.973793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "619230c4-f642-4835-8c5a-84ece6610e0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.974174] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.974489] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.974783] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.975057] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.978181] env[68233]: INFO nova.compute.manager [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Terminating instance [ 1057.986395] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782971, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.073385] env[68233]: DEBUG nova.network.neutron [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.090749] env[68233]: DEBUG nova.compute.manager [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.090969] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.091839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f12e41-4215-4a6e-9567-48419f9de997 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.101068] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.101068] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2f19db5-33f6-4d74-b8fb-1985cd643043 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.108332] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1058.108332] env[68233]: value = "task-2782972" [ 1058.108332] env[68233]: _type = "Task" [ 1058.108332] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.117954] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782972, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.224211] env[68233]: DEBUG nova.compute.utils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1058.234848] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1058.235024] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1058.290850] env[68233]: DEBUG nova.policy [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1058.293555] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a1fa4dc-eb77-4708-8fce-736e33014e9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.304764] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3af803-1f5f-46e0-9362-739f34b8317f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.340206] env[68233]: DEBUG nova.compute.manager [req-45c1e3c2-7f0a-434c-9504-606f09c3bbd1 req-0bb9b5b8-fff2-45a3-849f-811d704b3f1d service nova] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Detach interface failed, port_id=dafc44e7-03ce-48e2-b3a5-9f255fda5098, reason: Instance e95e2309-1df5-466b-bb8a-0c9188dc07c2 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1058.447541] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782970, 'name': ReconfigVM_Task, 'duration_secs': 0.354926} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.447867] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Reconfigured VM instance instance-00000065 to attach disk [datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e/19cf6f80-ff11-4881-896e-9fc162ded31e.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1058.448669] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d35d3fd9-c124-49ef-89d3-0b60366daba6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.456840] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1058.456840] env[68233]: value = "task-2782973" [ 1058.456840] env[68233]: _type = "Task" [ 1058.456840] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.467229] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782973, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.478692] env[68233]: DEBUG oslo_vmware.api [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Task: {'id': task-2782971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268736} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.478950] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.479840] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.480051] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.480251] env[68233]: INFO nova.compute.manager [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1058.480529] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1058.480762] env[68233]: DEBUG nova.compute.manager [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1058.480860] env[68233]: DEBUG nova.network.neutron [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.490429] env[68233]: DEBUG nova.compute.manager [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.490429] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.491132] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe1806e-88be-4593-9d7c-ac01657b57c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.500651] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.500947] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49d8c4b6-f2f8-46c3-a903-7ed9509cdb05 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.508793] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1058.508793] env[68233]: value = "task-2782974" [ 1058.508793] env[68233]: _type = "Task" [ 1058.508793] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.522094] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782974, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.576234] env[68233]: INFO nova.compute.manager [-] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Took 1.34 seconds to deallocate network for instance. [ 1058.624219] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782972, 'name': PowerOffVM_Task, 'duration_secs': 0.296925} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.624632] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.624907] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.625293] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20f63e53-07a8-453b-85c4-f74cbb8c704d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.697196] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1058.697431] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1058.697658] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleting the datastore file [datastore2] 5d99e0cb-9742-4a6c-84d0-f8d916ef9104 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1058.698454] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12e33f20-d299-4f5e-b2aa-5677d23d3276 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.706750] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1058.706750] env[68233]: value = "task-2782976" [ 1058.706750] env[68233]: _type = "Task" [ 1058.706750] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.719130] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.735547] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1058.740431] env[68233]: INFO nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating resource usage from migration 834923b8-fcd6-4c82-9d81-2d5cf1fa91e5 [ 1058.767397] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 87385201-3118-4a8e-9739-db3b431566c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.767617] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.767817] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768040] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768215] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dca145c8-ed95-4dfb-9534-37035c75dafb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1058.768337] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 35587446-6f3b-465b-a2a6-0b154374734c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768451] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 5d99e0cb-9742-4a6c-84d0-f8d916ef9104 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768604] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 619230c4-f642-4835-8c5a-84ece6610e0f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768914] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance e95e2309-1df5-466b-bb8a-0c9188dc07c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1058.768914] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9b7df182-5830-45a2-b50d-b3564a7e0b6c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1058.824331] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Successfully created port: dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.972398] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782973, 'name': Rename_Task, 'duration_secs': 0.179027} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.972398] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1058.972398] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a431be40-fd01-4d80-9a9c-2d8c92045170 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.982881] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1058.982881] env[68233]: value = "task-2782977" [ 1058.982881] env[68233]: _type = "Task" [ 1058.982881] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.992612] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782977, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.024754] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782974, 'name': PowerOffVM_Task, 'duration_secs': 0.280383} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.024754] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1059.024754] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1059.024754] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61525d98-d36d-43c5-b427-ae7ed927a06d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.035637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.037346] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.087295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.118135] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.118135] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Deleting contents of the VM from datastore datastore2 
{{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.118328] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleting the datastore file [datastore2] 619230c4-f642-4835-8c5a-84ece6610e0f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.119029] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50734a21-87d6-465b-946f-44739739c965 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.128081] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for the task: (returnval){ [ 1059.128081] env[68233]: value = "task-2782979" [ 1059.128081] env[68233]: _type = "Task" [ 1059.128081] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.138380] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.218904] env[68233]: DEBUG oslo_vmware.api [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145673} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.219882] env[68233]: DEBUG nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Successfully updated port: ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1059.221108] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.221302] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.221504] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.221721] env[68233]: INFO nova.compute.manager [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1059.222025] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.222882] env[68233]: DEBUG nova.compute.manager [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.222882] env[68233]: DEBUG nova.network.neutron [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.277115] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1059.277381] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 19cf6f80-ff11-4881-896e-9fc162ded31e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1059.277860] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 21cc2aa9-8c88-4aa1-8847-bf7f469ca991 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1059.278529] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Migration 834923b8-fcd6-4c82-9d81-2d5cf1fa91e5 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1059.278529] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1059.443502] env[68233]: DEBUG nova.compute.manager [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-plugged-ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1059.443769] env[68233]: DEBUG oslo_concurrency.lockutils [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.444034] env[68233]: DEBUG oslo_concurrency.lockutils [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.444251] env[68233]: DEBUG oslo_concurrency.lockutils [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.444568] env[68233]: DEBUG nova.compute.manager [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] No waiting events found dispatching 
network-vif-plugged-ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1059.444804] env[68233]: WARNING nova.compute.manager [req-138d454e-31b9-4dc7-977a-ccbb2c6be8f2 req-bfaf2dd3-3e7e-4d4c-af45-ac3c47cf151a service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received unexpected event network-vif-plugged-ae386ac0-a953-4d25-b510-beafa8a7c6d8 for instance with vm_state active and task_state None. [ 1059.459024] env[68233]: DEBUG nova.network.neutron [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.492343] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782977, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.538495] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1059.638946] env[68233]: DEBUG oslo_vmware.api [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Task: {'id': task-2782979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150622} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.639267] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.639455] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.639629] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.639804] env[68233]: INFO nova.compute.manager [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Took 1.15 seconds to destroy the instance on the hypervisor. 
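[editor's note] The destroy sequence that finishes above (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) is driven through vCenter tasks that the driver polls via wait_for_task/_poll_task (oslo_vmware/api.py:397, :434, :444), logging "progress is N%" until "completed successfully". The following is a minimal sketch of that poll-until-terminal pattern only, assuming a hypothetical get_task_info callable and TaskFailed exception; it is not oslo.vmware's actual implementation.

    # Illustrative poll loop mirroring the wait_for_task/_poll_task records above.
    # `get_task_info` and `TaskFailed` are hypothetical placeholders.
    import logging
    import time

    LOG = logging.getLogger(__name__)


    class TaskFailed(Exception):
        """Raised when the polled task ends in the 'error' state (hypothetical)."""


    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll get_task_info(task_id) until the task succeeds or fails.

        get_task_info is assumed to return an object with `state`
        ('queued', 'running', 'success', 'error'), `progress` (0-100),
        `result` and `error` attributes, loosely mirroring vim.TaskInfo.
        """
        while True:
            info = get_task_info(task_id)
            if info.state == 'success':
                LOG.debug("Task %s completed successfully.", task_id)
                return info.result
            if info.state == 'error':
                raise TaskFailed("Task %s failed: %s" % (task_id, info.error))
            LOG.debug("Task %s progress is %s%%.", task_id, info.progress or 0)
            time.sleep(poll_interval)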
[ 1059.640054] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.640618] env[68233]: DEBUG nova.compute.manager [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.640722] env[68233]: DEBUG nova.network.neutron [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.726443] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.726612] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.726783] env[68233]: DEBUG nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.750287] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1059.782967] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1059.783244] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1059.783459] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1059.783669] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1059.783817] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1059.783965] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1059.784196] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1059.784357] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1059.784527] env[68233]: DEBUG nova.virt.hardware [None 
req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1059.784690] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1059.784865] env[68233]: DEBUG nova.virt.hardware [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1059.785607] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 7025be4e-b800-42c8-a2c0-3ea059d3b929 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1059.787378] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16990dfd-1148-42da-abf9-b643608ee203 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.796759] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59f3013-002a-4365-9a21-b1591e2c6614 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.910996] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.911283] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.961707] env[68233]: INFO nova.compute.manager [-] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Took 1.48 seconds to deallocate network for instance. [ 1059.992088] env[68233]: DEBUG oslo_vmware.api [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782977, 'name': PowerOnVM_Task, 'duration_secs': 0.747062} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.992392] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1059.992642] env[68233]: INFO nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Took 10.02 seconds to spawn the instance on the hypervisor. [ 1059.992826] env[68233]: DEBUG nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1059.993607] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc56907f-d2f7-4680-9303-b3509de80de6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.996016] env[68233]: DEBUG nova.network.neutron [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.059886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.267047] env[68233]: WARNING nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. ignoring it [ 1060.292248] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 72c7e272-dd92-40a5-875b-3edfa1ad282b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1060.292565] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1060.292725] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1060.346399] env[68233]: DEBUG nova.network.neutron [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.414459] env[68233]: DEBUG nova.compute.utils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1060.468946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.501428] env[68233]: INFO nova.compute.manager [-] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Took 1.28 seconds to deallocate network for instance. [ 1060.517213] env[68233]: INFO nova.compute.manager [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Took 29.21 seconds to build instance. 
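
The resource tracker entries above report the node-level view (48 usable vCPUs with 11 allocated, 196590 MB RAM with 2624 MB used, 11 GB of disk consumed), and the scheduler report client later in this section adds the inventory figures Placement works from (reserved amounts and allocation ratios per resource class). A minimal sketch of how those numbers reduce to schedulable headroom follows; the ResourceView class and its helpers are illustrative stand-ins rather than Nova or Placement code, and the capacity rule used here, (total - reserved) * allocation_ratio, is the convention Placement documents, not something asserted by this log.

    # Illustrative only: reduce a "final resource view" plus inventory data to headroom.
    from dataclasses import dataclass

    @dataclass
    class ResourceView:
        total: float            # capacity reported for this resource class
        used: float             # sum of allocations against this provider
        reserved: float = 0.0   # capacity withheld from scheduling
        allocation_ratio: float = 1.0

        def capacity(self) -> float:
            # Effective schedulable capacity: (total - reserved) * allocation_ratio
            # (the formula Placement's inventory model is documented to use).
            return (self.total - self.reserved) * self.allocation_ratio

        def headroom(self) -> float:
            return self.capacity() - self.used

    # Figures copied from the resource tracker / inventory entries in this section.
    views = {
        "VCPU": ResourceView(total=48, used=11, reserved=0, allocation_ratio=4.0),
        "MEMORY_MB": ResourceView(total=196590, used=2624, reserved=512, allocation_ratio=1.0),
        "DISK_GB": ResourceView(total=400, used=11, reserved=0, allocation_ratio=1.0),
    }

    for name, view in views.items():
        print(f"{name}: capacity={view.capacity():.0f} used={view.used:.0f} "
              f"headroom={view.headroom():.0f}")
    # VCPU: capacity=192 used=11 headroom=181
    # MEMORY_MB: capacity=196078 used=2624 headroom=193454
    # DISK_GB: capacity=400 used=11 headroom=389
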
[ 1060.518813] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Successfully updated port: dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1060.575775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b144f37e-9f20-4981-8080-8a21606fa57a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.585579] env[68233]: DEBUG nova.network.neutron [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "address": "fa:16:3e:59:d7:40", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae386ac0-a9", "ovs_interfaceid": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.590359] env[68233]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc63cdd-f5e0-4dfa-a2a6-5194f77d70e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.625942] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10abc7b-b414-4045-ad3a-5130243d00ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.636043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd2acd1-2179-4eeb-b70a-ec2848f9ac72 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.653127] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.849202] env[68233]: INFO nova.compute.manager [-] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Took 1.21 seconds to deallocate network for instance. [ 1060.918500] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.014757] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.020843] env[68233]: DEBUG oslo_concurrency.lockutils [None req-18aeb3d6-a41c-44eb-b22f-813dd20a5b11 tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.735s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.021373] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.021490] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.021644] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1061.093014] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.093686] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1061.093862] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1061.094752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ae5098-fef6-467a-87be-5651fc6a25cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.113057] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1061.113300] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1061.113479] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1061.113698] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1061.113850] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 
tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1061.113999] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1061.114222] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1061.114380] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1061.114545] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1061.114708] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1061.114879] env[68233]: DEBUG nova.virt.hardware [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1061.121185] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfiguring VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1061.121512] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e14aa66c-bb73-4985-ac3e-1eb300c6c7ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.140888] env[68233]: DEBUG oslo_vmware.api [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1061.140888] env[68233]: value = "task-2782980" [ 1061.140888] env[68233]: _type = "Task" [ 1061.140888] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.150209] env[68233]: DEBUG oslo_vmware.api [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782980, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.157265] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1061.356755] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.561657] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1061.609715] env[68233]: DEBUG nova.compute.manager [req-53be6fd1-1c46-4e83-b2b9-f55fdf08fbe6 req-6ac9271c-0e09-442d-a7b3-a9e548aeabd4 service nova] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Received event network-vif-deleted-a38db034-3553-49b5-afdc-1b75d897f720 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1061.660871] env[68233]: DEBUG oslo_vmware.api [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782980, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.662861] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1061.663141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.943s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.663529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.939s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.663746] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.666213] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.936s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.666306] env[68233]: DEBUG nova.objects.instance [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1061.699859] env[68233]: INFO nova.scheduler.client.report [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Deleted allocations for instance dca145c8-ed95-4dfb-9534-37035c75dafb [ 1061.740226] env[68233]: DEBUG nova.network.neutron [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Updating instance_info_cache with network_info: [{"id": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "address": "fa:16:3e:89:32:f4", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdad7839d-82", "ovs_interfaceid": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.982649] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.983128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.983238] env[68233]: INFO nova.compute.manager [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Attaching volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 to /dev/sdb [ 1062.015154] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c283ac32-ac58-4e89-b546-afe881c855b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.023233] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12e982e-d4a1-48dc-82b1-db025c0d2bbd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.038639] env[68233]: DEBUG nova.virt.block_device [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating existing volume attachment record: 08be2fae-e333-457b-9d46-1fc1844080e5 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1062.151951] env[68233]: DEBUG oslo_vmware.api [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782980, 'name': ReconfigVM_Task, 'duration_secs': 0.63418} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.152538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.152745] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfigured VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1062.209612] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6489e176-c792-43d3-bdd0-f16058466fa6 tempest-ServersNegativeTestJSON-1168016716 tempest-ServersNegativeTestJSON-1168016716-project-member] Lock "dca145c8-ed95-4dfb-9534-37035c75dafb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.362s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.242980] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1062.243367] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Instance network_info: |[{"id": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "address": "fa:16:3e:89:32:f4", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdad7839d-82", "ovs_interfaceid": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1062.243774] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:89:32:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dad7839d-8275-4a1a-ac8d-d506f441a90d', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1062.251802] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.252332] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1062.252642] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f8df332-5aa0-48aa-8b3b-993b67069fea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.274570] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1062.274570] env[68233]: value = "task-2782982" [ 1062.274570] env[68233]: _type = "Task" [ 1062.274570] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.282756] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782982, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.294644] env[68233]: DEBUG nova.compute.manager [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-changed-ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1062.294946] env[68233]: DEBUG nova.compute.manager [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing instance network info cache due to event network-changed-ae386ac0-a953-4d25-b510-beafa8a7c6d8. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1062.295084] env[68233]: DEBUG oslo_concurrency.lockutils [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.295241] env[68233]: DEBUG oslo_concurrency.lockutils [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.295406] env[68233]: DEBUG nova.network.neutron [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing network info cache for port ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1062.657436] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1c724240-cb0c-44e5-990e-a5e187f89215 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.983s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.679811] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f4a88b47-149c-46f2-ac2c-8f4da57c09b0 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.682019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.777s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.682019] env[68233]: DEBUG nova.objects.instance [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'pci_requests' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.788071] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782982, 'name': CreateVM_Task, 'duration_secs': 0.359087} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.788528] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1062.789260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1062.789420] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1062.789741] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1062.789999] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-937cb016-8d0b-464e-b368-6dfae9d6967d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.796739] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1062.796739] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5299d41b-3a70-8a67-8754-cef082999ed3" [ 1062.796739] env[68233]: _type = "Task" [ 1062.796739] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.811322] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5299d41b-3a70-8a67-8754-cef082999ed3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.125206] env[68233]: DEBUG nova.network.neutron [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updated VIF entry in instance network info cache for port ae386ac0-a953-4d25-b510-beafa8a7c6d8. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1063.125682] env[68233]: DEBUG nova.network.neutron [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "address": "fa:16:3e:59:d7:40", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae386ac0-a9", "ovs_interfaceid": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1063.187064] env[68233]: DEBUG nova.objects.instance [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'numa_topology' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.315109] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5299d41b-3a70-8a67-8754-cef082999ed3, 'name': 
SearchDatastore_Task, 'duration_secs': 0.021085} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.315215] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.315412] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1063.315648] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.315796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.315974] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1063.316257] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba8b0dc1-2f39-4b84-b875-bfffd10614da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.331529] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1063.331529] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1063.334023] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3e42d9e-e324-44d2-90de-4630ccd3b47c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.341657] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1063.341657] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52453e65-3998-63e3-00f9-8b8c4acd7fbc" [ 1063.341657] env[68233]: _type = "Task" [ 1063.341657] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.357087] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52453e65-3998-63e3-00f9-8b8c4acd7fbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.526459] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "19cf6f80-ff11-4881-896e-9fc162ded31e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.526734] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.526952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "19cf6f80-ff11-4881-896e-9fc162ded31e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.527151] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.528827] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock 
"19cf6f80-ff11-4881-896e-9fc162ded31e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.530299] env[68233]: INFO nova.compute.manager [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Terminating instance [ 1063.628193] env[68233]: DEBUG oslo_concurrency.lockutils [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.628908] env[68233]: DEBUG nova.compute.manager [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Received event network-vif-deleted-4953ebd5-dce1-491d-a724-c337c5569470 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.629171] env[68233]: DEBUG nova.compute.manager [req-8de3bd7c-b541-4c13-8e9b-76dd67cc7f67 req-9c6541a1-3d98-4ec5-8726-a4059294f6c1 service nova] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Received event network-vif-deleted-f9953dba-7fc8-49a3-b5d0-41d95d904ce4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.688600] env[68233]: INFO nova.compute.claims [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1063.799127] env[68233]: DEBUG nova.compute.manager [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Received event network-vif-plugged-dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.799127] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Acquiring lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.799335] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.799407] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.799571] env[68233]: DEBUG 
nova.compute.manager [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] No waiting events found dispatching network-vif-plugged-dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.801390] env[68233]: WARNING nova.compute.manager [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Received unexpected event network-vif-plugged-dad7839d-8275-4a1a-ac8d-d506f441a90d for instance with vm_state building and task_state spawning. [ 1063.801390] env[68233]: DEBUG nova.compute.manager [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Received event network-changed-dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1063.801390] env[68233]: DEBUG nova.compute.manager [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Refreshing instance network info cache due to event network-changed-dad7839d-8275-4a1a-ac8d-d506f441a90d. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1063.801390] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Acquiring lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1063.801390] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Acquired lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1063.801390] env[68233]: DEBUG nova.network.neutron [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Refreshing network info cache for port dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1063.857715] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52453e65-3998-63e3-00f9-8b8c4acd7fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.02548} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.858971] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f357d2b8-a0c7-47d8-8d89-5af5c31d9241 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.867534] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1063.867534] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c94904-4448-24cb-2402-01771dd7ea41" [ 1063.867534] env[68233]: _type = "Task" [ 1063.867534] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.882542] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c94904-4448-24cb-2402-01771dd7ea41, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1063.882913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1063.883316] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 21cc2aa9-8c88-4aa1-8847-bf7f469ca991/21cc2aa9-8c88-4aa1-8847-bf7f469ca991.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1063.883681] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cedbc170-e25e-4c94-99f3-82b3b355b3ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.890898] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1063.890898] env[68233]: value = "task-2782985" [ 1063.890898] env[68233]: _type = "Task" [ 1063.890898] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.901481] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782985, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.034226] env[68233]: DEBUG nova.compute.manager [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.034475] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.035402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13081dd2-136d-48c2-90cc-ca06004958e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.048576] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.048863] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-246f1c44-7d6a-4de4-ac7a-56e9812e85b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.057042] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1064.057042] env[68233]: value = "task-2782986" [ 1064.057042] env[68233]: _type = "Task" [ 1064.057042] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.067528] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.403017] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782985, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488461} completed successfully. 
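The CopyVirtualDisk_Task entries above (image-cache vmdk copied into the instance directory) map onto oslo.vmware's public session helpers roughly as below; the session arguments, dc_ref and the path strings are placeholders and the keyword set is abbreviated, so treat it as an approximation of the flow rather than Nova's vm_util code:

    from oslo_vmware import api

    # host, user, password, retry count, poll interval -- placeholder values
    session = api.VMwareAPISession('vc1.example.test', 'user', 'secret', 10, 0.5)
    disk_mgr = session.vim.service_content.virtualDiskManager
    dc_ref = None   # placeholder: real code passes the Datacenter moref here
    src = '[datastore2] devstack-image-cache_base/<image-uuid>/<image-uuid>.vmdk'   # as logged above
    dst = '[datastore2] <instance-uuid>/<instance-uuid>.vmdk'
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)   # produces the "progress is N%" / "completed successfully" lines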
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.403309] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 21cc2aa9-8c88-4aa1-8847-bf7f469ca991/21cc2aa9-8c88-4aa1-8847-bf7f469ca991.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1064.403309] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1064.403577] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ef5f0bd-5357-43d5-b905-163ffc8f5b2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.415998] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1064.415998] env[68233]: value = "task-2782987" [ 1064.415998] env[68233]: _type = "Task" [ 1064.415998] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.425016] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782987, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.530350] env[68233]: DEBUG nova.network.neutron [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Updated VIF entry in instance network info cache for port dad7839d-8275-4a1a-ac8d-d506f441a90d. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1064.530828] env[68233]: DEBUG nova.network.neutron [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Updating instance_info_cache with network_info: [{"id": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "address": "fa:16:3e:89:32:f4", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdad7839d-82", "ovs_interfaceid": "dad7839d-8275-4a1a-ac8d-d506f441a90d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1064.566710] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782986, 'name': PowerOffVM_Task, 'duration_secs': 0.303798} completed successfully. 
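The instance_info_cache payload logged just above is an ordinary JSON-like structure; pulling the port id and fixed IPs out of an entry shaped like that dump is straightforward (keys copied from the log line, network_info standing for the logged list):

    # network_info = the list logged by update_instance_cache_with_nw_info above
    vif = network_info[0]
    port_id = vif["id"]                       # "dad7839d-8275-4a1a-ac8d-d506f441a90d"
    mac = vif["address"]                      # "fa:16:3e:89:32:f4"
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]    # ["192.168.128.13"]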
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.567245] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.567597] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1064.567901] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c3379fd5-bef6-4041-a759-0b778bf6cc18 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.579525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.580837] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.003s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.581209] env[68233]: DEBUG nova.objects.instance [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.642734] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1064.643081] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1064.643294] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Deleting the 
datastore file [datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1064.643598] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1cc19a0-7dd9-431e-a871-1bd13f99d386 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.651587] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for the task: (returnval){ [ 1064.651587] env[68233]: value = "task-2782990" [ 1064.651587] env[68233]: _type = "Task" [ 1064.651587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.660431] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.890875] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4da62d02-b09f-4c91-9e66-af8fe1d2cb24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.899446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43253e4e-0f1c-4978-bb3b-8b65dc36359f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.933978] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3db9c5-ea3d-436d-b98c-7f8412e9478c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.941666] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782987, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071516} completed successfully. 
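The teardown of instance 19cf6f80-ff11-4881-896e-9fc162ded31e traced above follows a fixed order: power off, unregister, delete the datastore directory. Sketched with the same oslo.vmware invoke/wait helpers (session, vm_ref and dc_ref are placeholders; this is an outline of the logged sequence, not Nova's vmops code):

    # 1. power off (VirtualMachine.PowerOffVM_Task)
    session.wait_for_task(session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    # 2. unregister the VM (no task object is returned)
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)
    # 3. delete the instance directory from the datastore
    file_mgr = session.vim.service_content.fileManager
    session.wait_for_task(session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name='[datastore2] 19cf6f80-ff11-4881-896e-9fc162ded31e',
        datacenter=dc_ref))   # dc_ref: placeholder Datacenter moref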
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.943816] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1064.944605] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2774e538-90d2-4398-9533-eb8d55233164 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.947747] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52ba9b3-299a-48b3-9655-e173122289ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.971814] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 21cc2aa9-8c88-4aa1-8847-bf7f469ca991/21cc2aa9-8c88-4aa1-8847-bf7f469ca991.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1064.979180] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43588a2d-af86-497a-8ca3-4c3810b4a754 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.993383] env[68233]: DEBUG nova.compute.provider_tree [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1065.000728] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1065.000728] env[68233]: value = "task-2782991" [ 1065.000728] env[68233]: _type = "Task" [ 1065.000728] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.010110] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782991, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.034694] env[68233]: DEBUG oslo_concurrency.lockutils [req-6e587ad0-0fcf-4ed2-8c77-bc9f933c2493 req-7b131d8c-9281-45b1-aef9-2ddaa57cacd1 service nova] Releasing lock "refresh_cache-21cc2aa9-8c88-4aa1-8847-bf7f469ca991" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.163615] env[68233]: DEBUG oslo_vmware.api [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Task: {'id': task-2782990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138408} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.163930] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1065.164174] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1065.164370] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1065.164548] env[68233]: INFO nova.compute.manager [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1065.164843] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
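The "Waiting for function ... _deallocate_network_with_retries to return" entry above comes from oslo.service's looping-call machinery, which retries the network deallocation until it succeeds. A minimal, hedged example of driving a callable that way (FixedIntervalLoopingCall shown for simplicity; the exact variant Nova wraps the retry in may differ):

    from oslo_service import loopingcall

    def _deallocate_once():
        # placeholder body; raising LoopingCallDone ends the loop and
        # unblocks the wait() below, which is what "Waiting for function" waits on
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_once)
    result = timer.start(interval=2).wait()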
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.165092] env[68233]: DEBUG nova.compute.manager [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1065.165188] env[68233]: DEBUG nova.network.neutron [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1065.257203] env[68233]: DEBUG nova.objects.instance [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1065.496924] env[68233]: DEBUG nova.scheduler.client.report [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.513357] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782991, 'name': ReconfigVM_Task, 'duration_secs': 0.368054} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.513357] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 21cc2aa9-8c88-4aa1-8847-bf7f469ca991/21cc2aa9-8c88-4aa1-8847-bf7f469ca991.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1065.513918] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-967b13c6-4436-434b-974a-bc017a76324f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.522685] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1065.522685] env[68233]: value = "task-2782992" [ 1065.522685] env[68233]: _type = "Task" [ 1065.522685] env[68233]: } to complete. 
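The inventory dict reported a few entries above is what Placement schedules against; usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Worked out for the logged values:

    inventory = {                     # values copied from the report above
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)             # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0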
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.535305] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782992, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.760211] env[68233]: DEBUG nova.objects.base [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<0bde10dc-6762-49fb-9c0d-6b104a3cfa39> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1065.760455] env[68233]: DEBUG nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1065.866950] env[68233]: DEBUG nova.policy [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1065.881415] env[68233]: DEBUG nova.compute.manager [req-07a00c12-89f9-4f98-b67f-25dfb4896d70 req-a50a0d0a-ed65-4b69-b7d9-6bc27c347395 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Received event network-vif-deleted-9ecf1241-bc53-44ab-938f-7ab979d37433 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1065.881680] env[68233]: INFO nova.compute.manager [req-07a00c12-89f9-4f98-b67f-25dfb4896d70 req-a50a0d0a-ed65-4b69-b7d9-6bc27c347395 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Neutron deleted interface 9ecf1241-bc53-44ab-938f-7ab979d37433; detaching it from the instance and deleting it from the info cache [ 1065.881831] env[68233]: DEBUG nova.network.neutron [req-07a00c12-89f9-4f98-b67f-25dfb4896d70 req-a50a0d0a-ed65-4b69-b7d9-6bc27c347395 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.932887] env[68233]: DEBUG nova.network.neutron [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.004022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.321s 
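The acquire/acquired/released triplets that run through this log (the compute_resources claim just above held its lock for 3.321s) come from oslo.concurrency's lock helpers; the "inner" frames at lockutils.py:405/410/424 in the DEBUG lines are their bookkeeping. A minimal sketch of the two usual forms, with illustrative lock names and placeholder bodies:

    from oslo_concurrency import lockutils

    # context-manager form
    with lockutils.lock('compute_resources'):
        pass   # guarded work, e.g. a resource claim

    # decorator form, which produces the "acquired by" / "released by" pairs
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass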
{{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.004022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.934s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.033440] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782992, 'name': Rename_Task, 'duration_secs': 0.152245} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.036600] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1066.036600] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20287e2d-02f4-453a-99c1-e9d0dcb75b48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.044425] env[68233]: INFO nova.network.neutron [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating port be512a20-e94b-4c51-8658-24c6e1feba94 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1066.046893] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1066.046893] env[68233]: value = "task-2782993" [ 1066.046893] env[68233]: _type = "Task" [ 1066.046893] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.057353] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782993, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.384942] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b9f109e-c303-48c1-a224-97dcedb063bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.395610] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bd8bc2-95d4-4383-a543-8ea394bba5f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.428474] env[68233]: DEBUG nova.compute.manager [req-07a00c12-89f9-4f98-b67f-25dfb4896d70 req-a50a0d0a-ed65-4b69-b7d9-6bc27c347395 service nova] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Detach interface failed, port_id=9ecf1241-bc53-44ab-938f-7ab979d37433, reason: Instance 19cf6f80-ff11-4881-896e-9fc162ded31e could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1066.435818] env[68233]: INFO nova.compute.manager [-] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Took 1.27 seconds to deallocate network for instance. [ 1066.511705] env[68233]: INFO nova.compute.claims [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1066.558467] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782993, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.586101] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1066.586101] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1066.586101] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0b4e8e0-1420-41e3-b233-6a1eecc09ae7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.602620] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aedd703e-7609-48f5-b4a7-a76e56e75d2c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.629762] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1066.630101] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3621efff-71f9-43ca-aeab-d17bba7959cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.649886] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1066.649886] env[68233]: value = "task-2782994" [ 1066.649886] env[68233]: _type = "Task" [ 1066.649886] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.659417] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782994, 'name': ReconfigVM_Task} progress is 5%. 
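The _attach_volume_vmdk payload above is the Cinder connection_info for a VMDK-backed volume; the attach path only needs a small subset of it. Key names below are copied from the log line, and the comment on 'volume' reflects the usual VMDK-driver layout (a backing VM per volume) rather than anything stated in the log:

    connection_info = {
        'driver_volume_type': 'vmdk',
        'data': {
            'volume': 'vm-559496',        # moref of the volume's backing VM (assumption, see above)
            'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53',
            'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53',
            'access_mode': 'rw',
        },
    }
    assert connection_info['driver_volume_type'] == 'vmdk'
    backing_ref_value = connection_info['data']['volume']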
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.943919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.017862] env[68233]: INFO nova.compute.resource_tracker [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating resource usage from migration 834923b8-fcd6-4c82-9d81-2d5cf1fa91e5 [ 1067.058168] env[68233]: DEBUG oslo_vmware.api [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782993, 'name': PowerOnVM_Task, 'duration_secs': 0.670558} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.060820] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1067.061044] env[68233]: INFO nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Took 7.31 seconds to spawn the instance on the hypervisor. [ 1067.061262] env[68233]: DEBUG nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1067.062288] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8defade8-8a70-42c3-b0e2-f3a4f2e5dd39 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.163340] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782994, 'name': ReconfigVM_Task, 'duration_secs': 0.36406} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.163340] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1067.167519] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddda7a74-dc3c-4eb2-a4ca-b8631b78c4e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.187689] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1067.187689] env[68233]: value = "task-2782995" [ 1067.187689] env[68233]: _type = "Task" [ 1067.187689] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.202031] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782995, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.289023] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e13d9d8-99f1-478d-9f9e-80089522d596 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.297413] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6be786-0262-4461-8535-29c6bc9b53dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.330218] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a63f28d-fe34-4608-a209-a1f97b8e0597 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.339276] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d047f270-348b-4b35-9026-995080291a16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.355342] env[68233]: DEBUG nova.compute.provider_tree [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.530287] env[68233]: DEBUG nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Successfully updated port: 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1067.586247] env[68233]: INFO nova.compute.manager [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Took 21.79 seconds to build instance. [ 1067.699744] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782995, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.789215] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.789348] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.789578] env[68233]: DEBUG nova.network.neutron [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1067.826862] env[68233]: DEBUG nova.compute.manager [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1067.827159] env[68233]: DEBUG oslo_concurrency.lockutils [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.827723] env[68233]: DEBUG oslo_concurrency.lockutils [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.827913] env[68233]: DEBUG oslo_concurrency.lockutils [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.828095] env[68233]: DEBUG nova.compute.manager [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 
req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] No waiting events found dispatching network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1067.828311] env[68233]: WARNING nova.compute.manager [req-f337b0e6-39a9-409f-a969-0a7ad795ba61 req-414f1b00-6b95-47f7-8d9d-afe1ef6c3dd8 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received unexpected event network-vif-plugged-be512a20-e94b-4c51-8658-24c6e1feba94 for instance with vm_state shelved_offloaded and task_state spawning. [ 1067.859031] env[68233]: DEBUG nova.scheduler.client.report [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.033974] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.034266] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.035056] env[68233]: DEBUG nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.088117] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7f9d2744-59b7-492c-a140-0c3065ac8371 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.305s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.164718] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.165080] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.171328] env[68233]: DEBUG nova.compute.manager [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-plugged-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1068.171733] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.171733] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.171810] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.171951] env[68233]: DEBUG nova.compute.manager [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] No waiting events found dispatching network-vif-plugged-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1068.172211] env[68233]: WARNING nova.compute.manager [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received unexpected event network-vif-plugged-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd for instance with vm_state active and task_state None. [ 1068.172403] env[68233]: DEBUG nova.compute.manager [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-changed-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1068.172588] env[68233]: DEBUG nova.compute.manager [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing instance network info cache due to event network-changed-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd. 
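The "No waiting events found" / "Received unexpected event" lines above are the receiving half of Nova's external-event plumbing: Neutron posts network-vif-plugged-<port_id>, and pop_instance_event looks that key up in a per-instance table guarded by the "<uuid>-events" lock. A small sketch of the key format only (the lookup table itself is Nova-internal and omitted):

    def event_key(name, tag=None):
        # mirrors the "network-vif-plugged-<port_id>" strings in the log
        return f'{name}-{tag}' if tag else name

    key = event_key('network-vif-plugged', '44f782f3-8c83-4b99-bb5d-1409aa3a4ddd')
    # with no waiter registered for this key, the WARNING about an
    # "unexpected event" seen above is all that happens.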
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1068.172753] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.199857] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782995, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.364180] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.360s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.364404] env[68233]: INFO nova.compute.manager [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Migrating [ 1068.372106] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.730s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.372381] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.374566] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.111s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.376379] env[68233]: INFO nova.compute.claims [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1068.408703] env[68233]: INFO nova.scheduler.client.report [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 9b7df182-5830-45a2-b50d-b3564a7e0b6c [ 1068.534618] env[68233]: DEBUG nova.network.neutron [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 
62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.569325] env[68233]: WARNING nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. ignoring it [ 1068.569533] env[68233]: WARNING nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. ignoring it [ 1068.667744] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1068.699690] env[68233]: DEBUG oslo_vmware.api [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2782995, 'name': ReconfigVM_Task, 'duration_secs': 1.153696} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.700021] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1068.850103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.850384] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.850561] env[68233]: DEBUG nova.compute.manager [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.851449] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83ad8fb-476b-487f-b902-dd185b9bce96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.859109] env[68233]: DEBUG nova.compute.manager [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1068.859668] env[68233]: DEBUG nova.objects.instance [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'flavor' on Instance uuid 21cc2aa9-8c88-4aa1-8847-bf7f469ca991 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.892421] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock 
"refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.892654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.892846] env[68233]: DEBUG nova.network.neutron [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.916277] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa81e3f3-56a0-4c93-a01d-970642d6399d tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "9b7df182-5830-45a2-b50d-b3564a7e0b6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.609s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.962051] env[68233]: DEBUG nova.network.neutron [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "address": "fa:16:3e:59:d7:40", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae386ac0-a9", "ovs_interfaceid": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "address": "fa:16:3e:d8:68:d5", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f782f3-8c", "ovs_interfaceid": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.037222] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.065853] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='cf4c944651eee9a282d41507e548280c',container_format='bare',created_at=2025-03-06T03:56:39Z,direct_url=,disk_format='vmdk',id=1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-140280414-shelved',owner='9df7c30630584a2bb79e798dcc571850',properties=ImageMetaProps,protected=,size=31666176,status='active',tags=,updated_at=2025-03-06T03:56:54Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1069.065853] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor limits 
0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.065853] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1069.066054] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.066199] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1069.066348] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1069.066556] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1069.066715] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1069.066897] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1069.067052] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1069.067229] env[68233]: DEBUG nova.virt.hardware [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1069.068145] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fec89b8-7bd4-42d2-b643-a80d52397a79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.076742] env[68233]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2613ab9-a111-41a4-9af9-75e67da2b66f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.092323] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:b9:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be512a20-e94b-4c51-8658-24c6e1feba94', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1069.100469] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1069.100717] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1069.100940] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4eb39cc7-58da-4e93-8ae7-c3a6834cd89f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.120803] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1069.120803] env[68233]: value = "task-2782996" [ 1069.120803] env[68233]: _type = "Task" [ 1069.120803] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.129095] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782996, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.193652] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.466542] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.467191] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.467370] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.468167] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.468167] env[68233]: DEBUG nova.network.neutron [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Refreshing network info cache for port 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1069.469830] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564fc92f-ec4b-4dec-9aa9-ecd1057f47cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.493561] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1069.493831] env[68233]: DEBUG nova.virt.hardware [None 
req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1069.493999] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1069.494201] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1069.494348] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1069.494494] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1069.494696] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1069.494856] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1069.495065] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1069.495243] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1069.495419] env[68233]: DEBUG nova.virt.hardware [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1069.501802] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] 
[instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfiguring VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1069.505288] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da0c5c6f-813b-4675-921d-fb3e0de160dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.525656] env[68233]: DEBUG oslo_vmware.api [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1069.525656] env[68233]: value = "task-2782997" [ 1069.525656] env[68233]: _type = "Task" [ 1069.525656] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.537522] env[68233]: DEBUG oslo_vmware.api [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782997, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.633087] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782996, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.636375] env[68233]: DEBUG nova.network.neutron [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.639544] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed92923-e517-464d-989b-ad59e4e20081 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.647266] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c423f680-6a8d-4cd1-b8e9-98d4b4882fba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.678971] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5781f6e-64cf-4809-82c2-3b38a9aaf89e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.691479] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91561afb-e0b7-4186-9931-dd8509cfdef1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.705736] env[68233]: DEBUG nova.compute.provider_tree [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.741543] env[68233]: DEBUG nova.objects.instance [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'flavor' on Instance uuid 35587446-6f3b-465b-a2a6-0b154374734c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.868606] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1069.868947] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b02d2d02-b1d1-4567-aff9-358e485de931 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.876539] env[68233]: DEBUG oslo_vmware.api [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1069.876539] env[68233]: value = "task-2782998" [ 1069.876539] env[68233]: _type = "Task" [ 1069.876539] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.884868] env[68233]: DEBUG oslo_vmware.api [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782998, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.919379] env[68233]: DEBUG nova.compute.manager [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1069.919587] env[68233]: DEBUG nova.compute.manager [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing instance network info cache due to event network-changed-be512a20-e94b-4c51-8658-24c6e1feba94. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1069.919815] env[68233]: DEBUG oslo_concurrency.lockutils [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] Acquiring lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.919936] env[68233]: DEBUG oslo_concurrency.lockutils [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] Acquired lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.920240] env[68233]: DEBUG nova.network.neutron [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Refreshing network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1070.039507] env[68233]: DEBUG oslo_vmware.api [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782997, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.130253] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782996, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.143105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.180692] env[68233]: DEBUG nova.network.neutron [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updated VIF entry in instance network info cache for port 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.181239] env[68233]: DEBUG nova.network.neutron [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "address": "fa:16:3e:59:d7:40", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae386ac0-a9", "ovs_interfaceid": "ae386ac0-a953-4d25-b510-beafa8a7c6d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "address": "fa:16:3e:d8:68:d5", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f782f3-8c", "ovs_interfaceid": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.191547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.191680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.191971] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.192240] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.192457] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.195144] env[68233]: INFO nova.compute.manager [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Terminating instance [ 1070.208609] env[68233]: DEBUG nova.scheduler.client.report [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.246162] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c6764e60-9156-40dd-b223-c0cab46a6283 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.263s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.386835] env[68233]: DEBUG oslo_vmware.api [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2782998, 'name': PowerOffVM_Task, 'duration_secs': 0.217806} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.387125] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1070.387331] env[68233]: DEBUG nova.compute.manager [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.388126] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f28681-6b0f-4665-9c5a-ab71defdf071 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.537438] env[68233]: DEBUG oslo_vmware.api [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2782997, 'name': ReconfigVM_Task, 'duration_secs': 0.90419} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.537438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.537438] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfigured VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1070.633766] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2782996, 'name': CreateVM_Task, 'duration_secs': 1.370992} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.633970] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1070.634622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1070.638019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1070.638019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1070.638019] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce454816-1cdc-4fc3-a441-3aefa28aa91b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.639880] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1070.639880] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2ba3a-d00b-03dc-4d17-3d7c2fea10e7" [ 1070.639880] env[68233]: _type = "Task" [ 1070.639880] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.652629] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a2ba3a-d00b-03dc-4d17-3d7c2fea10e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.659290] env[68233]: DEBUG nova.network.neutron [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updated VIF entry in instance network info cache for port be512a20-e94b-4c51-8658-24c6e1feba94. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1070.659612] env[68233]: DEBUG nova.network.neutron [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [{"id": "be512a20-e94b-4c51-8658-24c6e1feba94", "address": "fa:16:3e:da:b9:2c", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.239", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe512a20-e9", "ovs_interfaceid": "be512a20-e94b-4c51-8658-24c6e1feba94", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1070.684659] env[68233]: DEBUG oslo_concurrency.lockutils [req-8d9724f9-6544-4254-9229-22e15948b9b9 req-b46598b7-d32d-4973-990f-7ac58fefc545 service nova] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.699084] env[68233]: DEBUG nova.compute.manager [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.699307] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.700392] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0dd6478-d3af-4806-9b91-6ca2768e4d10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.708668] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1070.708878] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04f83512-1bb1-4b13-983b-d6f8a0ed870a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.713341] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.713856] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1070.717365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.630s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.717615] env[68233]: DEBUG nova.objects.instance [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'resources' on Instance uuid e95e2309-1df5-466b-bb8a-0c9188dc07c2 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1070.718729] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1070.718729] env[68233]: value = "task-2782999" [ 1070.718729] env[68233]: _type = "Task" [ 1070.718729] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.728033] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.900735] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c690ef25-1dc9-48ce-87b6-93289141c7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.912513] env[68233]: INFO nova.compute.manager [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Rebuilding instance [ 1070.953252] env[68233]: DEBUG nova.compute.manager [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.954321] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5f3319-cf27-4b95-966f-3d60997f7343 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.041057] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ade8413d-4200-46ea-a623-dde4a7d05944 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.460s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.151588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.151858] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Processing image 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1071.152107] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.152260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.152436] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1071.152698] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43c5a3fe-7402-4ae7-b084-8f2c4b569643 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.161759] env[68233]: DEBUG oslo_concurrency.lockutils [req-4dd5b62e-c627-433d-aeb4-08c138eea248 req-d38dbd0d-c796-4197-a5f5-9bb453c8747c service nova] Releasing lock "refresh_cache-62cd066c-5eac-4f07-bf4e-9275fedc7384" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1071.165395] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1071.165571] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1071.166379] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-874333ec-3544-42e0-8af0-718498f5ca2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.171614] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1071.171614] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52adf6f4-9c0f-3c64-8205-292f5d0b9a6b" [ 1071.171614] env[68233]: _type = "Task" [ 1071.171614] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.179658] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52adf6f4-9c0f-3c64-8205-292f5d0b9a6b, 'name': SearchDatastore_Task} progress is 0%. 
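The _fetch_image_if_missing path above serializes on a lock named after the cached VMDK path before probing the datastore (SearchDatastore_Task) and only fetches the image on a miss. Below is a hypothetical sketch of that double-checked pattern using oslo.concurrency's named locks; image_cached_on_datastore() and fetch_image_to_cache() are made-up stand-ins for the datastore search and download steps, not Nova functions.

    # Hypothetical sketch of the "fetch image if missing" pattern seen above.
    from oslo_concurrency import lockutils


    def image_cached_on_datastore(image_id):
        """Placeholder for the SearchDatastore_Task probe in the trace."""
        return False


    def fetch_image_to_cache(image_id):
        """Placeholder for the actual download into the image-cache folder."""
        print('fetching %s into the datastore image cache' % image_id)


    def ensure_cached(image_id):
        # One lock per cached image path, so concurrent spawns of the same image
        # do not download it twice, while different images proceed in parallel.
        lock_name = '[datastore2] devstack-image-cache_base/%s.vmdk' % image_id
        with lockutils.lock(lock_name):
            if not image_cached_on_datastore(image_id):   # re-check under the lock
                fetch_image_to_cache(image_id)


    ensure_cached('1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14')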
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.223253] env[68233]: DEBUG nova.compute.utils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1071.225023] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1071.225116] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1071.237272] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2782999, 'name': PowerOffVM_Task, 'duration_secs': 0.196316} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.237542] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1071.237700] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.238457] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e32dcc11-fa43-4bcf-b354-c84c94921802 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.267357] env[68233]: DEBUG nova.policy [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65225f2affe34ceda9a265989bddfc9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74a353ea173c4b8bb74b84032d4e12b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1071.309092] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Unregistered the VM {{(pid=68233) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1071.309434] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1071.309744] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleting the datastore file [datastore2] 11ec9800-fa7e-4dbd-bdc1-63d0b496589f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1071.310199] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58111ded-9731-4849-8086-5f4b7f6498bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.317670] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for the task: (returnval){ [ 1071.317670] env[68233]: value = "task-2783001" [ 1071.317670] env[68233]: _type = "Task" [ 1071.317670] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.332882] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2783001, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.452053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.452053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.452053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1071.452310] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.452473] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.454879] env[68233]: INFO nova.compute.manager [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Terminating instance [ 1071.546668] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Successfully created port: 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1071.555521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61168c88-c1ce-4436-a183-ad32661c3569 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.563035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80cc3819-bb40-4173-b228-6744ee5f7fd0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.592880] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6603aa5-e4bd-43b0-9667-e08df7a73f90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.600817] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fcfa4fc-d10d-469c-aaed-6c1accaa47c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.617298] env[68233]: DEBUG nova.compute.provider_tree [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1071.662983] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2cf553a-542b-4ce0-bd93-cba873576f7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.685173] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 
tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1071.695801] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1071.695966] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Fetch image to [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2/OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1071.696171] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Downloading stream optimized image 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 to [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2/OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2.vmdk on the data store datastore2 as vApp {{(pid=68233) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1071.696342] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Downloading image file data 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 to the ESX as VM named 'OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2' {{(pid=68233) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1071.747903] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1071.810551] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1071.810551] env[68233]: value = "resgroup-9" [ 1071.810551] env[68233]: _type = "ResourcePool" [ 1071.810551] env[68233]: }. 
{{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1071.811136] env[68233]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-93e4664f-c242-44f9-ad93-9228653768b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.836457] env[68233]: DEBUG oslo_vmware.api [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Task: {'id': task-2783001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187325} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.837689] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.837771] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.837914] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.838105] env[68233]: INFO nova.compute.manager [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1071.838348] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.838592] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease: (returnval){ [ 1071.838592] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527765f6-17f1-3575-1035-0663aa112185" [ 1071.838592] env[68233]: _type = "HttpNfcLease" [ 1071.838592] env[68233]: } obtained for vApp import into resource pool (val){ [ 1071.838592] env[68233]: value = "resgroup-9" [ 1071.838592] env[68233]: _type = "ResourcePool" [ 1071.838592] env[68233]: }. 
{{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1071.838800] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the lease: (returnval){ [ 1071.838800] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527765f6-17f1-3575-1035-0663aa112185" [ 1071.838800] env[68233]: _type = "HttpNfcLease" [ 1071.838800] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1071.839017] env[68233]: DEBUG nova.compute.manager [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.839054] env[68233]: DEBUG nova.network.neutron [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.847576] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1071.847576] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527765f6-17f1-3575-1035-0663aa112185" [ 1071.847576] env[68233]: _type = "HttpNfcLease" [ 1071.847576] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1071.959755] env[68233]: DEBUG nova.compute.manager [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1071.959755] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.960405] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2174a6a-a26c-4673-972c-8181446e7c4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.968359] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1071.968880] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.969136] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d22ecc4c-9cd0-4c6d-9be3-338626418490 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.970763] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1ce0993-5413-4c1d-bb52-adea1feb7e4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.978640] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1071.978640] env[68233]: value = "task-2783004" [ 1071.978640] env[68233]: _type = "Task" [ 1071.978640] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.987771] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783004, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.044293] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.044583] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.044876] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] 21cc2aa9-8c88-4aa1-8847-bf7f469ca991 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.045282] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29036acf-3de3-437c-88c7-17d5770a1623 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.053108] env[68233]: DEBUG oslo_vmware.api [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1072.053108] env[68233]: value = "task-2783005" [ 1072.053108] env[68233]: _type = "Task" [ 1072.053108] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.061950] env[68233]: DEBUG oslo_vmware.api [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.147150] env[68233]: ERROR nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [req-045af8dd-5b8c-4eca-bb29-014ab91045d9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-045af8dd-5b8c-4eca-bb29-014ab91045d9"}]} [ 1072.166648] env[68233]: DEBUG nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1072.183663] env[68233]: DEBUG nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1072.183968] env[68233]: DEBUG nova.compute.provider_tree [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1072.193570] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1072.193896] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc2a2262-da9b-4b94-a464-17ccd7e64226 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.197187] env[68233]: DEBUG nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1072.206230] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1072.206230] env[68233]: value = "task-2783006" [ 1072.206230] env[68233]: _type = "Task" [ 
1072.206230] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.221214] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783006, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.223122] env[68233]: DEBUG nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1072.348647] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1072.348647] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527765f6-17f1-3575-1035-0663aa112185" [ 1072.348647] env[68233]: _type = "HttpNfcLease" [ 1072.348647] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1072.348978] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1072.348978] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527765f6-17f1-3575-1035-0663aa112185" [ 1072.348978] env[68233]: _type = "HttpNfcLease" [ 1072.348978] env[68233]: }. 
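The 409 from Placement above (placement.concurrent_update) is the normal signal that another writer bumped the resource provider's generation between the read and the write; the report client reacts by refreshing its cached inventory and retrying, which is what eventually lands the update at a newer generation later in the trace. A rough sketch of that optimistic-concurrency loop against the Placement HTTP API is shown below; the endpoint, token handling and the microversion header value are simplified assumptions, not values from this deployment.

    # Sketch of the read-modify-write retry performed when Placement answers
    # 409 placement.concurrent_update. Endpoint and token are placeholders.
    import requests

    PLACEMENT = 'http://placement.example.test'
    HEADERS = {'X-Auth-Token': 'placeholder-token',
               'OpenStack-API-Version': 'placement 1.39'}  # assumed microversion


    def set_inventories(rp_uuid, inventories, max_attempts=4):
        url = '%s/resource_providers/%s/inventories' % (PLACEMENT, rp_uuid)
        for _ in range(max_attempts):
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                # Echo back the generation we read; Placement rejects the PUT
                # with 409 if someone else changed the provider in the meantime.
                'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # Generation conflict: loop, re-read the provider, and try again.
        raise RuntimeError('still conflicting after %d attempts' % max_attempts)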
{{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1072.349796] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a47ec7-501f-49dc-89c7-72a6eda5fe27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.358031] env[68233]: DEBUG nova.compute.manager [req-8e8df613-9115-428a-be4a-c73f1eba6d46 req-d43ea38a-0ec3-4f07-b267-23f8cdf9d8bc service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Received event network-vif-deleted-9bdac2bf-51ef-46a3-ad11-6c893bcc0570 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1072.358175] env[68233]: INFO nova.compute.manager [req-8e8df613-9115-428a-be4a-c73f1eba6d46 req-d43ea38a-0ec3-4f07-b267-23f8cdf9d8bc service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Neutron deleted interface 9bdac2bf-51ef-46a3-ad11-6c893bcc0570; detaching it from the instance and deleting it from the info cache [ 1072.358389] env[68233]: DEBUG nova.network.neutron [req-8e8df613-9115-428a-be4a-c73f1eba6d46 req-d43ea38a-0ec3-4f07-b267-23f8cdf9d8bc service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.364167] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1072.364378] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HTTP connection to write to file with size = 31666176 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1072.432413] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63c5f73f-d4af-45f3-9667-aae256db5d1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.488137] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783004, 'name': PowerOffVM_Task, 'duration_secs': 0.202077} completed successfully. 
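The lease handling above follows the standard vSphere import flow: ask the resource pool for an ImportVApp HttpNfcLease, wait for it to become ready, pull the device (VMDK) upload URL out of the lease info, stream the image bytes to that URL while periodically reporting HttpNfcLeaseProgress, then complete the lease. The outline below compresses those steps; write_image_fn is a caller-supplied placeholder for the HTTP upload and progress reporting, and the invoke_api argument shapes are assumptions rather than verified signatures.

    # Hypothetical outline of the ImportVApp/HttpNfcLease flow visible above.
    from oslo_vmware import vim_util


    def import_stream_optimized(session, rp_ref, folder_ref, import_spec,
                                write_image_fn):
        """write_image_fn(url, lease) streams the bytes and keeps the lease
        alive with HttpNfcLeaseProgress; it is a placeholder for this sketch."""
        # 1. "Invoking ResourcePool.ImportVApp": obtain an HttpNfcLease.
        lease = session.invoke_api(session.vim, 'ImportVApp', rp_ref,
                                   spec=import_spec, folder=folder_ref)
        # 2. Poll until the lease "is ready" (the wait_for_lease_ready lines).
        session.wait_for_lease_ready(lease)
        # 3. Read lease.info and take the disk upload URL ("Found VMDK URL ...").
        info = session.invoke_api(vim_util, 'get_object_property',
                                  session.vim, lease, 'info')
        write_image_fn(info.deviceUrl[0].url, lease)
        # 4. Tell vCenter the transfer finished.
        session.invoke_api(session.vim, 'HttpNfcLeaseComplete', lease)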
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.490662] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.493857] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-ae386ac0-a953-4d25-b510-beafa8a7c6d8" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.494120] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-ae386ac0-a953-4d25-b510-beafa8a7c6d8" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.536129] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea5aac-b3b2-442f-9cac-90a322fd7fce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.544231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ada6ad5-0ae1-4da7-944a-0f9b0f3c8cad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.548452] env[68233]: INFO nova.compute.manager [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Detaching volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 [ 1072.583344] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9584d0b-bafc-4fde-8a02-fc6bc1e95a13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.592813] env[68233]: DEBUG oslo_vmware.api [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157886} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.595977] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1072.596188] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1072.596384] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1072.596572] env[68233]: INFO nova.compute.manager [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Took 0.64 seconds to destroy the instance on the hypervisor. [ 1072.596921] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1072.597101] env[68233]: DEBUG nova.compute.manager [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1072.597211] env[68233]: DEBUG nova.network.neutron [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1072.599830] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a61002-1add-4671-8619-5bbf4db16026 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.607903] env[68233]: INFO nova.virt.block_device [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Attempting to driver detach volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 from mountpoint /dev/sdb [ 1072.610017] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1072.610017] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1072.610017] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53b2811-2286-4fd9-b201-739fd1df8fac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.620840] env[68233]: DEBUG nova.compute.provider_tree [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1072.648164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41b6019-e92d-4ee7-9aa2-9cf0fe87078d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.656687] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3245774d-0d6d-4521-bf2e-45797d6509d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.681699] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83514bd9-8f73-4ce0-b7af-9889259c91dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.697522] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] The volume has not been displaced from its original location: [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1072.702754] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1072.707105] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc43754-59b1-4ce2-a4a9-7dac6408bcc9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.731165] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783006, 'name': PowerOffVM_Task, 'duration_secs': 0.20263} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.734400] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.734627] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1072.738206] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1072.738206] env[68233]: value = "task-2783007" [ 1072.738206] env[68233]: _type = "Task" [ 1072.738206] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.749870] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783007, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.763809] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Start spawning the instance on the hypervisor. 
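The volume detach above reduces to a VM reconfigure: build a VirtualDeviceConfigSpec with operation "remove" for the volume's virtual disk (without a fileOperation, so the backing VMDK that belongs to Cinder stays on the datastore), wrap it in a VirtualMachineConfigSpec, submit ReconfigVM_Task, and wait for it like any other task. The sketch below shows that shape via the suds object factory exposed by the session; the spec type names are standard vSphere ones, but treat the exact attribute spelling as an assumption.

    # Sketch of the "Reconfiguring VM ... to detach disk" step from the trace.
    # `session` is an oslo.vmware VMwareAPISession; `vm_ref` and `disk_device`
    # would come from a PropertyCollector lookup of the VM's hardware.device list.
    def detach_disk_from_vm(session, vm_ref, disk_device):
        client_factory = session.vim.client.factory

        device_spec = client_factory.create('ns0:VirtualDeviceConfigSpec')
        device_spec.operation = 'remove'   # free the controller slot only
        device_spec.device = disk_device   # no fileOperation, so the backing
                                           # VMDK (the Cinder volume) is left
                                           # untouched on the datastore

        config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
        config_spec.deviceChange = [device_spec]

        task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                                  spec=config_spec)
        session.wait_for_task(task)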
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1072.766247] env[68233]: DEBUG nova.network.neutron [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.836841] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1072.841281] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1072.841652] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1072.842111] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1072.842362] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1072.842918] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1072.843786] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1072.843786] env[68233]: DEBUG nova.virt.hardware [None 
req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1072.843786] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1072.843944] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1072.844157] env[68233]: DEBUG nova.virt.hardware [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1072.846740] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7ec1ab-1e2c-4d1b-a7ae-80b889087829 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.868816] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6df0208-fecb-4760-9d5a-903241719a26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.872642] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8365d2b0-41d7-464a-9bf7-4d6c37e8b724 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.898619] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6753a3-2571-4b84-ae19-c35f27e4f1a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.931824] env[68233]: DEBUG nova.compute.manager [req-8e8df613-9115-428a-be4a-c73f1eba6d46 req-d43ea38a-0ec3-4f07-b267-23f8cdf9d8bc service nova] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Detach interface failed, port_id=9bdac2bf-51ef-46a3-ad11-6c893bcc0570, reason: Instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1072.998974] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1072.999245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.000156] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87fa664f-7163-484f-9187-5c4ec59b57c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.040791] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fbfbf4-cd80-43c2-81b1-bbd541e0ec85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.044602] env[68233]: DEBUG nova.compute.manager [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Received event network-vif-plugged-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1073.044840] env[68233]: DEBUG oslo_concurrency.lockutils [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.045075] env[68233]: DEBUG oslo_concurrency.lockutils [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.045258] env[68233]: DEBUG oslo_concurrency.lockutils [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.045429] env[68233]: DEBUG nova.compute.manager [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] No waiting events found dispatching network-vif-plugged-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1073.045634] env[68233]: WARNING nova.compute.manager [req-4e257566-022f-4330-afa2-ea7e8df882fc req-09b7b021-7a1f-4686-9204-a86b4f2e2637 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Received unexpected 
event network-vif-plugged-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 for instance with vm_state building and task_state spawning. [ 1073.077490] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfiguring VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1073.083837] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05a43ae2-5e80-43d9-ab5b-308c7463052e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.109022] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1073.109022] env[68233]: value = "task-2783008" [ 1073.109022] env[68233]: _type = "Task" [ 1073.109022] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.116858] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.121258] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Successfully updated port: 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1073.161252] env[68233]: DEBUG nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 133 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1073.161252] env[68233]: DEBUG nova.compute.provider_tree [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 133 to 134 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1073.161252] env[68233]: DEBUG nova.compute.provider_tree [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1073.247450] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1073.247730] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1073.247916] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1073.248197] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1073.248395] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1073.248585] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1073.248828] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1073.249140] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1073.249353] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1073.249529] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1073.249779] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1073.257788] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b56583d6-841d-499c-82f2-d8bd7cbef98c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.273986] env[68233]: INFO nova.compute.manager [-] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Took 1.43 seconds to deallocate network for instance. [ 1073.274354] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783007, 'name': ReconfigVM_Task, 'duration_secs': 0.209219} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.276378] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1073.288304] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be08767a-5aea-442f-8144-56b4afcc1114 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.298558] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1073.298558] env[68233]: value = "task-2783009" [ 1073.298558] env[68233]: _type = "Task" [ 1073.298558] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.305742] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1073.305742] env[68233]: value = "task-2783010" [ 1073.305742] env[68233]: _type = "Task" [ 1073.305742] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.312666] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783009, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.317422] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783010, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.437895] env[68233]: DEBUG nova.network.neutron [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.617399] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.621787] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.621941] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.622113] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1073.665504] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.948s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.671164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.611s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.673192] env[68233]: INFO nova.compute.claims [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 
tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.692168] env[68233]: INFO nova.scheduler.client.report [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocations for instance e95e2309-1df5-466b-bb8a-0c9188dc07c2 [ 1073.802015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.812452] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783009, 'name': ReconfigVM_Task, 'duration_secs': 0.272766} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.813022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1073.818942] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783010, 'name': ReconfigVM_Task, 'duration_secs': 0.276728} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.819433] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1073.940890] env[68233]: INFO nova.compute.manager [-] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Took 1.34 seconds to deallocate network for instance. [ 1074.004367] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Completed reading data from the image iterator. 
{{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1074.004578] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1074.005618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5347eb-6c48-42a6-a681-d5a648fdb004 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.014072] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1074.014072] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1074.014072] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7a1b3a73-4d4a-4e48-b14d-cc1da2c23f1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.119520] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.157050] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1074.203870] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bff392df-1cbe-444d-a0c1-1a477cdac8f6 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "e95e2309-1df5-466b-bb8a-0c9188dc07c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.641s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1074.320432] env[68233]: DEBUG nova.network.neutron [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1074.324055] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1074.324055] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1074.324267] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 
tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1074.324304] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1074.324458] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1074.324603] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1074.324806] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1074.324965] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1074.325147] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1074.325307] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1074.325474] env[68233]: DEBUG nova.virt.hardware [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1074.330868] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfiguring VM instance instance-0000000c to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1074.331384] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce6289c1-d681-42f9-95ef-bdc85d45ec14 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.350864] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1074.350864] env[68233]: value = "task-2783011" [ 1074.350864] env[68233]: _type = "Task" [ 1074.350864] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.360082] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783011, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.384893] env[68233]: DEBUG nova.compute.manager [req-abd1821b-d1fe-4742-b40f-e68adca516ae req-5083a5ff-b5fb-42b2-a0ef-e2019153ab99 service nova] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Received event network-vif-deleted-dad7839d-8275-4a1a-ac8d-d506f441a90d {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1074.391273] env[68233]: DEBUG oslo_vmware.rw_handles [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528345b4-c4df-63f1-dfd9-bf7888b95418/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1074.391486] env[68233]: INFO nova.virt.vmwareapi.images [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Downloaded image file data 1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 [ 1074.392568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6721b2a6-c1fd-49c3-8f71-36e8479e4d1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.408793] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10381ebc-fbc8-4da0-ad4a-2e6dd9441db6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.447257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.474109] env[68233]: INFO nova.virt.vmwareapi.images [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] The imported VM was unregistered [ 1074.476784] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Caching image 
{{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1074.477011] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.477313] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a01a004f-3f87-4a72-817f-3900d82fce12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.488427] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.488615] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2/OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2.vmdk to [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk. {{(pid=68233) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1074.488900] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-614b94c5-ec1c-4119-aec4-053c152e8bf9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.495845] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1074.495845] env[68233]: value = "task-2783013" [ 1074.495845] env[68233]: _type = "Task" [ 1074.495845] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.504483] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.618544] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.832795] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.833159] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Instance network_info: |[{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1074.833562] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:3b:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1074.841064] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1074.843809] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1074.844991] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e8ac8b4-c6e3-4c7c-8685-5e4bfdde01e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.870222] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783011, 'name': ReconfigVM_Task, 'duration_secs': 0.246812} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.871574] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfigured VM instance instance-0000000c to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1074.872112] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.872337] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1074.872337] env[68233]: value = "task-2783014" [ 1074.872337] env[68233]: _type = "Task" [ 1074.872337] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.875366] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09525ef2-25f9-4fed-b52a-5dc3edc5e272 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.877796] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72fa6496-17a5-42d4-ab6a-d04dd99a5546 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.904654] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1074.911807] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f949ac35-9848-48bc-a6d4-6c03911505b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.928512] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783014, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.928895] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1074.928895] env[68233]: value = "task-2783015" [ 1074.928895] env[68233]: _type = "Task" [ 1074.928895] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.936038] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1074.936038] env[68233]: value = "task-2783016" [ 1074.936038] env[68233]: _type = "Task" [ 1074.936038] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.944157] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1074.944392] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1074.944586] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1074.945405] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1c0a30-b056-4607-9df5-799f57625720 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.951329] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f2a2e7-da65-4f59-886b-3d29434c722f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.954187] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783016, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.972155] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523c4273-cbb3-4f46-8ab8-e1ec194cede0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.975764] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfcd8d2-6ea8-440d-8749-975c161250f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.011201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d522cb62-0347-4a6b-ba5c-d6f1845a4796 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.014117] env[68233]: WARNING nova.virt.vmwareapi.driver [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1075.014472] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.015189] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18ee19bd-31a8-4872-b336-ecc2919f1841 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.024802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54293b7c-de65-44b1-975b-82542e75079b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.030796] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1075.034228] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bf7c802-fcfe-4310-bbbe-8f1a06d257fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.035985] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.046098] env[68233]: DEBUG nova.compute.provider_tree [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.049432] env[68233]: DEBUG nova.compute.manager [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Received event network-changed-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1075.049712] env[68233]: DEBUG nova.compute.manager [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Refreshing instance network info cache due to event network-changed-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1075.049886] env[68233]: DEBUG oslo_concurrency.lockutils [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] Acquiring lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.050137] env[68233]: DEBUG oslo_concurrency.lockutils [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] Acquired lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.052217] env[68233]: DEBUG nova.network.neutron [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Refreshing network info cache for port 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1075.119979] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.121401] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1075.121598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1075.121776] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1075.122047] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50ca71dd-7949-401d-8088-ffd63397b878 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.130505] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1075.130505] env[68233]: value = "task-2783018" [ 1075.130505] env[68233]: _type = "Task" [ 1075.130505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.141560] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.393252] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783014, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.448397] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.518755] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task} progress is 46%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.553268] env[68233]: DEBUG nova.scheduler.client.report [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.621603] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.641816] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.766145] env[68233]: DEBUG nova.network.neutron [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updated VIF entry in instance network info cache for port 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1075.766518] env[68233]: DEBUG nova.network.neutron [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.893063] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783014, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.949769] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.017812] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.061635] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.062181] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.064946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.596s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.065208] env[68233]: DEBUG nova.objects.instance [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lazy-loading 'resources' on Instance uuid 87385201-3118-4a8e-9739-db3b431566c5 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.123103] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.143110] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.269246] env[68233]: DEBUG oslo_concurrency.lockutils [req-b67c9ca7-b4da-4d1d-a712-95f49fff1d34 req-fd08fbb6-b1fa-48be-a717-b989ad40578a service nova] Releasing lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.393494] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783014, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.449719] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783016, 'name': ReconfigVM_Task, 'duration_secs': 1.441755} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.449873] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Reconfigured VM instance instance-0000000c to attach disk [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8/4a388705-7e00-45dc-8891-c6e587b1cdb8.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.450202] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1076.518277] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.568127] env[68233]: DEBUG nova.compute.utils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1076.573029] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1076.573029] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1076.619617] env[68233]: DEBUG nova.policy [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02292d8d8a1c4258b4dd938b30360068', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7041b68cab94091bd1d4d76b858a926', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1076.630771] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.646535] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.788172] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d666b2c-2370-4c6c-8f4c-59d2f02d1d91 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.795914] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3b5845-8117-489e-b4cf-b92ffa035351 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.826747] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52331c6-15ed-435c-a072-613697b04f45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.835432] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4786c2-d63e-4d88-a7f7-ab8d9fcb95ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.853618] env[68233]: DEBUG nova.compute.provider_tree [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1076.899129] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783014, 'name': CreateVM_Task, 'duration_secs': 1.922002} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.899129] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1076.900269] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.900424] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.900742] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1076.901110] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3312c292-23d2-4a6a-8c47-cf736db88052 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.910837] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1076.910837] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52eedd32-51a6-22e9-211e-7c7933d9130b" [ 1076.910837] env[68233]: _type = "Task" [ 1076.910837] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.923477] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Successfully created port: bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.937700] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52eedd32-51a6-22e9-211e-7c7933d9130b, 'name': SearchDatastore_Task, 'duration_secs': 0.012103} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.938052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.938300] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1076.938559] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.938713] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.940693] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1076.940972] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e02f3152-ef25-43c5-9c7d-44d111d60f86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.953212] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1076.953411] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1076.955684] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ea86f93-e4df-4b82-bbb1-1b39e8ccc6b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.960034] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78fd904-41b8-4430-850c-fabbb113ddd5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.965929] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1076.965929] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5249cc65-ea52-26cd-0c9b-cc23ea10c3aa" [ 1076.965929] env[68233]: _type = "Task" [ 1076.965929] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.982317] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471b14dd-3f28-407b-9738-da60c1e4ff37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.989747] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5249cc65-ea52-26cd-0c9b-cc23ea10c3aa, 'name': SearchDatastore_Task, 'duration_secs': 0.008857} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.003296] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1077.006476] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d267b040-27b4-4c41-a886-5c4e92fec5f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.014766] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1077.014766] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522dfa83-62b9-3d81-c92d-8c8d84e1c483" [ 1077.014766] env[68233]: _type = "Task" [ 1077.014766] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.018114] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783013, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.335497} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.020909] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2/OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2.vmdk to [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk. [ 1077.021119] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Cleaning up location [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1077.021288] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_49be2e6b-5be9-4ded-aad7-f981780541c2 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.021518] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-751919f0-1413-4df0-9808-f2ba099830da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.027794] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522dfa83-62b9-3d81-c92d-8c8d84e1c483, 'name': SearchDatastore_Task, 'duration_secs': 0.008745} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.029073] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.029181] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1077.029458] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1077.029458] env[68233]: value = "task-2783019" [ 1077.029458] env[68233]: _type = "Task" [ 1077.029458] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.029660] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8fde8132-bc45-4f43-bc49-aca07abde58a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.040528] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783019, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.041637] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1077.041637] env[68233]: value = "task-2783020" [ 1077.041637] env[68233]: _type = "Task" [ 1077.041637] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.048710] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783020, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.073684] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.121092] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.140591] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.656081} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.140872] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.141130] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.141260] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.357757] env[68233]: DEBUG nova.scheduler.client.report [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1077.540569] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034994} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.540864] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.541041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1077.541289] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk to [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1077.541537] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3228ae9-a129-475b-b29a-0bb874a18ab8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.551078] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439632} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.551995] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1077.552227] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1077.552517] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1077.552517] env[68233]: value = "task-2783021" [ 1077.552517] env[68233]: _type = "Task" [ 1077.552517] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.552692] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-51009779-c45d-49c6-aac4-4d3980bdd948 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.562121] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783021, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.566637] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1077.566637] env[68233]: value = "task-2783022" [ 1077.566637] env[68233]: _type = "Task" [ 1077.566637] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.567364] env[68233]: DEBUG nova.network.neutron [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Port cc05db07-a36a-494d-92b6-af58fdd9d143 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1077.575831] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783022, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.624140] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.647866] env[68233]: INFO nova.virt.block_device [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Booting with volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 at /dev/sdb [ 1077.683142] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3b785a9-d105-46aa-ba37-9bf1a1f79c73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.694453] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32e842c-a40f-4874-94cb-92c74c662dbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.498027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.430s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.501167] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.504789] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Successfully updated port: bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.506909] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.492s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.507073] env[68233]: DEBUG nova.objects.instance [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lazy-loading 'resources' on Instance uuid 5d99e0cb-9742-4a6c-84d0-f8d916ef9104 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.520552] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-168e506a-af59-4b76-af2a-2c5498ca2555 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.520760] env[68233]: DEBUG nova.compute.manager [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Received event network-vif-plugged-bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11656}} [ 1078.521533] env[68233]: DEBUG oslo_concurrency.lockutils [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] Acquiring lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.521533] env[68233]: DEBUG oslo_concurrency.lockutils [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.521533] env[68233]: DEBUG oslo_concurrency.lockutils [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.521533] env[68233]: DEBUG nova.compute.manager [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] No waiting events found dispatching network-vif-plugged-bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1078.521722] env[68233]: WARNING nova.compute.manager [req-565a819b-1c9c-46c7-a61b-2aae9e8e3c73 req-fd178f60-8847-4ec5-b5de-b3252184974a service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Received unexpected event network-vif-plugged-bfd74e6f-d2a1-48db-8918-792fc8175d83 for instance with vm_state building and task_state spawning. [ 1078.528773] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783022, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074034} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.536015] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1078.536947] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783021, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.537065] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.540194] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90fad8b-9576-4318-bffa-7b9229ea853e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.546244] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76adbb3-259e-4d9d-b9e4-ee122092472c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.558634] env[68233]: INFO nova.scheduler.client.report [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Deleted allocations for instance 87385201-3118-4a8e-9739-db3b431566c5 [ 1078.568023] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.568023] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.568023] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.568283] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.568368] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.568514] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.568840] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.569036] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.569218] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.569386] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.569558] env[68233]: DEBUG nova.virt.hardware [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.571541] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2947279b-333b-4f0b-a65a-b80ca3b22b92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.600055] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1078.601528] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55194d37-0c81-4830-94c2-0334d0f21c2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.616555] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a07612-8181-4879-8be6-d7b6b50c46ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.624291] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffaa9ef-ffcd-4807-8c1f-f8dc157b110f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.632591] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfd70eb-f704-4d6a-b596-a6c2ff7db279 {{(pid=68233) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.635316] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1078.635316] env[68233]: value = "task-2783023" [ 1078.635316] env[68233]: _type = "Task" [ 1078.635316] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.655303] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.659302] env[68233]: DEBUG nova.virt.block_device [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating existing volume attachment record: 4dd281bc-de69-4af0-b9a9-a3c1a6dc92a0 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1079.017570] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783021, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.025051] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.025246] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquired lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.025366] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.030023] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.033577] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.033831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.034413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.069868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac76293-fc82-4fb5-b327-e09b0e869171 tempest-ServerRescueNegativeTestJSON-1058840264 tempest-ServerRescueNegativeTestJSON-1058840264-project-member] Lock "87385201-3118-4a8e-9739-db3b431566c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.775s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.156708] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783023, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.264044] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9e90a5-e8bf-47e4-b184-a43580bf9841 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.273148] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda427a5-72ef-48de-be12-0fcfcf2dd8b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.308840] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8c8929-5a58-416a-8002-d3ed81c26bbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.318436] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8da721-5650-4361-bb36-87d11f083b1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.334446] env[68233]: DEBUG nova.compute.provider_tree [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1079.507479] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783021, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.516227] env[68233]: DEBUG oslo_vmware.api [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783008, 'name': ReconfigVM_Task, 'duration_secs': 6.233779} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.516497] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.516708] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Reconfigured VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1079.593392] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.663246] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.884158] env[68233]: DEBUG nova.scheduler.client.report [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1079.884300] env[68233]: DEBUG nova.compute.provider_tree [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 134 to 135 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1079.884427] env[68233]: DEBUG nova.compute.provider_tree [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1079.954755] env[68233]: DEBUG nova.network.neutron [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Updating instance_info_cache with network_info: [{"id": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "address": "fa:16:3e:82:c7:6f", "network": {"id": "0d28c3e3-fad0-4c59-b9e3-d71fad61cfc6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1082840875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7041b68cab94091bd1d4d76b858a926", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfd74e6f-d2", "ovs_interfaceid": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.009334] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783021, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.400111} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.009674] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14/1ae40f5b-ab4b-4b07-ac0d-d97ead9c2e14.vmdk to [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1080.010489] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661b3c16-9ec6-4868-ae4a-e029de8ad374 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.033496] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.033853] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35111b1b-bcdf-42dd-af4f-47f7595a805e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.055665] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1080.055665] env[68233]: value = "task-2783024" [ 1080.055665] env[68233]: _type = "Task" [ 1080.055665] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.064442] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783024, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.102018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.102018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.102018] env[68233]: DEBUG nova.network.neutron [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.156369] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783023, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.334497] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Received event network-changed-bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1080.334497] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Refreshing instance network info cache due to event network-changed-bfd74e6f-d2a1-48db-8918-792fc8175d83. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1080.334497] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Acquiring lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.392471] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.395310] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.038s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.395759] env[68233]: DEBUG nova.objects.instance [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lazy-loading 'resources' on Instance uuid 619230c4-f642-4835-8c5a-84ece6610e0f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1080.418728] env[68233]: INFO nova.scheduler.client.report [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted allocations for instance 5d99e0cb-9742-4a6c-84d0-f8d916ef9104 [ 1080.458274] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Releasing lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.458712] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Instance network_info: |[{"id": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "address": "fa:16:3e:82:c7:6f", "network": {"id": "0d28c3e3-fad0-4c59-b9e3-d71fad61cfc6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1082840875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7041b68cab94091bd1d4d76b858a926", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfd74e6f-d2", 
"ovs_interfaceid": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1080.459831] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Acquired lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.460072] env[68233]: DEBUG nova.network.neutron [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Refreshing network info cache for port bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1080.462145] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:c7:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfd74e6f-d2a1-48db-8918-792fc8175d83', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.469934] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Creating folder: Project (c7041b68cab94091bd1d4d76b858a926). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.474344] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4987684b-1f4f-4d46-841b-63c70942cbff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.489035] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Created folder: Project (c7041b68cab94091bd1d4d76b858a926) in parent group-v559223. [ 1080.489035] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Creating folder: Instances. Parent ref: group-v559500. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1080.489035] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2dc4b5b-b8b5-4c4f-893a-bde851a6d9fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.498086] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Created folder: Instances in parent group-v559500. 
[ 1080.498332] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.498523] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.498725] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b23a61c1-e557-449c-a590-ef6e7acc2ba8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.521809] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.521809] env[68233]: value = "task-2783027" [ 1080.521809] env[68233]: _type = "Task" [ 1080.521809] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.530363] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783027, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.565899] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783024, 'name': ReconfigVM_Task, 'duration_secs': 0.297044} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.565899] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384/62cd066c-5eac-4f07-bf4e-9275fedc7384.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.567071] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'encryption_format': None, 'encryption_options': None, 'boot_index': 0, 'encrypted': False, 'device_name': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'image_id': 'da133fda-e1e2-42a1-a7e0-b8b1426a8490'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559490', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'name': 'volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '62cd066c-5eac-4f07-bf4e-9275fedc7384', 'attached_at': '', 'detached_at': '', 'volume_id': 
'134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'serial': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91'}, 'boot_index': None, 'attachment_id': 'dff69871-2dc6-480f-a72c-e2081b60caf0', 'mount_device': '/dev/sdb', 'disk_bus': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68233) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1080.567071] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1080.567271] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559490', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'name': 'volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '62cd066c-5eac-4f07-bf4e-9275fedc7384', 'attached_at': '', 'detached_at': '', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'serial': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1080.568246] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e560f5-940c-4e1f-a041-a0aae47b8034 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.585277] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f1a307-d446-4bb7-8d38-9275261e35d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.608166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.608588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.608899] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1080.609154] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.609380] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.620566] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91/volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.621300] env[68233]: INFO nova.compute.manager [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Terminating instance [ 1080.622620] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa83c68d-5196-4306-9bba-66f98ee47133 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.644588] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1080.644588] env[68233]: value = "task-2783028" [ 1080.644588] env[68233]: _type = "Task" [ 1080.644588] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.655529] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783028, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.658700] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783023, 'name': ReconfigVM_Task, 'duration_secs': 1.86019} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.660876] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.661526] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aab38542-68ad-4cc1-bd3e-f968d5ad9751 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.667327] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1080.667327] env[68233]: value = "task-2783029" [ 1080.667327] env[68233]: _type = "Task" [ 1080.667327] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.680176] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783029, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.795484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.795484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.795689] env[68233]: DEBUG nova.network.neutron [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.815849] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1080.816125] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1080.816313] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1080.816664] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1080.816913] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1080.817576] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1080.818016] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1080.818128] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1080.818404] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1080.818732] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 
1080.818986] env[68233]: DEBUG nova.virt.hardware [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1080.820352] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985b898f-99b3-4891-9e9b-0f463cebd4b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.833708] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bb4050-5a31-4bbc-a540-3d3116a1e64a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.852075] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:49:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2eef47a-821b-4644-9b1b-6ca932ebe044', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.861687] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.865316] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.865676] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c2cfa21-48d6-4cd1-b0a6-85acbc65cf2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.885435] env[68233]: DEBUG nova.network.neutron [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.890023] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.890023] env[68233]: value = "task-2783030" [ 1080.890023] env[68233]: _type = "Task" [ 1080.890023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.899808] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783030, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.930522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a50ee8dd-3797-4b04-bbb4-9daae51214d9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "5d99e0cb-9742-4a6c-84d0-f8d916ef9104" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.349s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.956396] env[68233]: DEBUG nova.network.neutron [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Updated VIF entry in instance network info cache for port bfd74e6f-d2a1-48db-8918-792fc8175d83. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1080.956743] env[68233]: DEBUG nova.network.neutron [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Updating instance_info_cache with network_info: [{"id": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "address": "fa:16:3e:82:c7:6f", "network": {"id": "0d28c3e3-fad0-4c59-b9e3-d71fad61cfc6", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1082840875-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7041b68cab94091bd1d4d76b858a926", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfd74e6f-d2", "ovs_interfaceid": "bfd74e6f-d2a1-48db-8918-792fc8175d83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.034547] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783027, 'name': CreateVM_Task, 'duration_secs': 0.383747} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.034757] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1081.035613] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.035737] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.036110] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1081.036379] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-933dc7ed-56b2-473d-b8d3-a93c1ae9ae78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.041937] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1081.041937] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527da6ff-8a17-4872-da6f-fb09e1c63865" [ 1081.041937] env[68233]: _type = "Task" [ 1081.041937] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.055207] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527da6ff-8a17-4872-da6f-fb09e1c63865, 'name': SearchDatastore_Task, 'duration_secs': 0.010455} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.055392] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.055635] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.055894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.056056] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.056252] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.058958] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a491a0d-451c-49cc-abba-2f1350abf918 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.070250] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.070250] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.070941] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-228c0846-7625-43dc-8b09-eb26f10c185d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.079853] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1081.079853] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b84374-b811-db02-08b8-fddf891b5179" [ 1081.079853] env[68233]: _type = "Task" [ 1081.079853] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.090023] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b84374-b811-db02-08b8-fddf891b5179, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.133348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36f1b84-56dd-46f3-abb3-d5b66760ce36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.140587] env[68233]: DEBUG nova.compute.manager [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1081.140856] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1081.142531] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b870d86d-0374-4c7a-9d1e-1a34e1616f30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.145639] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d05832-8dcb-4265-b14c-760e452700ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.155831] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1081.183499] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4a774bd-784b-460b-bb79-20077accdb3a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.185721] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783028, 'name': ReconfigVM_Task, 'duration_secs': 0.478882} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.189151] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7477652f-d8f6-49b3-8c07-407fffd21e1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.191474] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfigured VM instance instance-00000054 to attach disk [datastore2] volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91/volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1081.196573] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fae9ff2d-9ffd-498e-8647-4aa002cec641 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.209030] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1081.209030] env[68233]: value = "task-2783031" [ 1081.209030] env[68233]: _type = "Task" [ 1081.209030] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.221960] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1081.221960] env[68233]: value = "task-2783032" [ 1081.221960] env[68233]: _type = "Task" [ 1081.221960] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.222994] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783029, 'name': Rename_Task, 'duration_secs': 0.143971} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.224379] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1081.226025] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1353ae3-f2ce-415d-ac47-4a245ba5f735 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.238445] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c9f1d60c-2cdb-42aa-9341-40549c63acd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.240257] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783031, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.245516] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783032, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.254100] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1081.254100] env[68233]: value = "task-2783033" [ 1081.254100] env[68233]: _type = "Task" [ 1081.254100] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.254486] env[68233]: DEBUG nova.compute.provider_tree [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.264526] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783033, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.389163] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.407402] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783030, 'name': CreateVM_Task, 'duration_secs': 0.402535} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.407906] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1081.409700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.409700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.409700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1081.409700] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6ad6b3c-639e-4142-8018-8eba2f4007a3 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.413799] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1081.413799] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e01557-daf6-6717-3db4-0bfec70b4ec6" [ 1081.413799] env[68233]: _type = "Task" [ 1081.413799] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.422106] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e01557-daf6-6717-3db4-0bfec70b4ec6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.459150] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Releasing lock "refresh_cache-72c7e272-dd92-40a5-875b-3edfa1ad282b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.459510] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-deleted-ae386ac0-a953-4d25-b510-beafa8a7c6d8 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1081.460129] env[68233]: INFO nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Neutron deleted interface ae386ac0-a953-4d25-b510-beafa8a7c6d8; detaching it from the instance and deleting it from the info cache [ 1081.460129] env[68233]: DEBUG nova.network.neutron [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}, {"id": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "address": "fa:16:3e:d8:68:d5", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44f782f3-8c", "ovs_interfaceid": "44f782f3-8c83-4b99-bb5d-1409aa3a4ddd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.591238] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b84374-b811-db02-08b8-fddf891b5179, 'name': SearchDatastore_Task, 'duration_secs': 0.011761} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.593267] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c083e4fa-ec23-438e-a01a-40dbd5438923 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.599259] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1081.599259] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188ac9-afe9-8166-1cbc-ff384814b5ab" [ 1081.599259] env[68233]: _type = "Task" [ 1081.599259] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.608310] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188ac9-afe9-8166-1cbc-ff384814b5ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.631737] env[68233]: INFO nova.network.neutron [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Port ae386ac0-a953-4d25-b510-beafa8a7c6d8 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1081.631737] env[68233]: INFO nova.network.neutron [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Port 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1081.631737] env[68233]: DEBUG nova.network.neutron [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.721113] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783031, 'name': PowerOffVM_Task, 'duration_secs': 0.233668} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.721113] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1081.721113] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1081.721113] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2201f093-f186-4f62-9aef-37fc7b6b6536 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.732472] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783032, 'name': ReconfigVM_Task, 'duration_secs': 0.159778} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.733138] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559490', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'name': 'volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '62cd066c-5eac-4f07-bf4e-9275fedc7384', 'attached_at': '', 'detached_at': '', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'serial': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1081.733939] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e07216fe-dd19-484a-a40c-875f24908ac0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.739812] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1081.739812] env[68233]: value = "task-2783035" [ 1081.739812] env[68233]: _type = "Task" [ 1081.739812] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.747649] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783035, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.766978] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783033, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.790995] env[68233]: DEBUG nova.scheduler.client.report [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1081.791453] env[68233]: DEBUG nova.compute.provider_tree [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 135 to 136 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1081.791742] env[68233]: DEBUG nova.compute.provider_tree [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1081.833842] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1081.834506] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1081.834506] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleting the datastore file [datastore2] 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1081.835085] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6af473c2-f617-40af-bf39-9a1aad76d52f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.844274] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1081.844274] env[68233]: value = "task-2783036" [ 1081.844274] env[68233]: _type = "Task" [ 1081.844274] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.852275] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783036, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.920269] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e22f554-727c-4366-9462-0574bf27f8af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.927202] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e01557-daf6-6717-3db4-0bfec70b4ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.927202] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.927383] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.927529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.943846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bcf72f8-110a-4341-8e50-ff75a08f0d2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.950772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1081.962294] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Acquiring lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.109842] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52188ac9-afe9-8166-1cbc-ff384814b5ab, 'name': SearchDatastore_Task, 'duration_secs': 0.009} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.110136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.110451] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 72c7e272-dd92-40a5-875b-3edfa1ad282b/72c7e272-dd92-40a5-875b-3edfa1ad282b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.110768] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.110996] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1082.111241] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1521766-7960-4fc7-90af-0ecfc68995d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.113160] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-130dc7ee-12d1-4555-b246-093d302581cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.119456] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1082.119456] env[68233]: value = "task-2783037" [ 1082.119456] env[68233]: _type = "Task" [ 1082.119456] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.123218] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1082.123407] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1082.124737] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf1ee1f6-6ffd-491f-8ab7-907f28857b90 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.129903] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783037, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.132884] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.135060] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1082.135060] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f36a7-0949-b98c-6204-2488b851b0b3" [ 1082.135060] env[68233]: _type = "Task" [ 1082.135060] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.142587] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f36a7-0949-b98c-6204-2488b851b0b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.252210] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783035, 'name': Rename_Task, 'duration_secs': 0.168891} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.252210] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.252210] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d7b9be09-8b67-4690-ad6b-1fcdef0be526 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.263095] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1082.263095] env[68233]: value = "task-2783038" [ 1082.263095] env[68233]: _type = "Task" [ 1082.263095] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.272160] env[68233]: DEBUG oslo_vmware.api [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783033, 'name': PowerOnVM_Task, 'duration_secs': 0.600449} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.272941] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.273265] env[68233]: INFO nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1082.273531] env[68233]: DEBUG nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1082.275041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeebb9e5-eef0-49d0-aa19-ebbbc75dc90b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.282810] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783038, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.297140] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.300269] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.356s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.300584] env[68233]: DEBUG nova.objects.instance [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lazy-loading 'resources' on Instance uuid 19cf6f80-ff11-4881-896e-9fc162ded31e {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1082.319420] env[68233]: INFO nova.scheduler.client.report [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Deleted allocations for instance 619230c4-f642-4835-8c5a-84ece6610e0f [ 1082.353857] env[68233]: DEBUG oslo_vmware.api [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783036, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.431587} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.354246] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1082.354456] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1082.354711] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1082.354895] env[68233]: INFO nova.compute.manager [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1082.355229] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.355427] env[68233]: DEBUG nova.compute.manager [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1082.355522] env[68233]: DEBUG nova.network.neutron [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1082.400894] env[68233]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd could not be found.", "detail": ""}} {{(pid=68233) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1082.401244] env[68233]: DEBUG nova.network.neutron [-] Unable to show port 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd as it no longer exists. {{(pid=68233) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1082.458241] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1082.458903] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4f1b0f1-5cdb-4b4f-bc64-ff9210d767e9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.467489] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1082.467489] env[68233]: value = "task-2783039" [ 1082.467489] env[68233]: _type = "Task" [ 1082.467489] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.476299] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783039, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.632202] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783037, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.636814] env[68233]: DEBUG oslo_concurrency.lockutils [None req-be135974-92b7-494a-8e06-4631e609efaf tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-0bde10dc-6762-49fb-9c0d-6b104a3cfa39-ae386ac0-a953-4d25-b510-beafa8a7c6d8" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.143s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.648311] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f36a7-0949-b98c-6204-2488b851b0b3, 'name': SearchDatastore_Task, 'duration_secs': 0.010864} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.649346] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-918dec3e-8f3d-490a-89b2-12745a0c05b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.655148] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1082.655148] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52832f95-79b3-7f0a-fb9b-f4e25695f8da" [ 1082.655148] env[68233]: _type = "Task" [ 1082.655148] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.664837] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52832f95-79b3-7f0a-fb9b-f4e25695f8da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.778164] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783038, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.807150] env[68233]: INFO nova.compute.manager [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Took 25.57 seconds to build instance. 
[ 1082.827917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5a6e11f2-928a-468d-896d-19f6396fc4b9 tempest-ListServersNegativeTestJSON-1421082387 tempest-ListServersNegativeTestJSON-1421082387-project-member] Lock "619230c4-f642-4835-8c5a-84ece6610e0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.854s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.977350] env[68233]: DEBUG oslo_vmware.api [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783039, 'name': PowerOnVM_Task, 'duration_secs': 0.466414} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.977606] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1082.977786] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4538befb-121b-4fa9-8254-da8abc833787 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance '4a388705-7e00-45dc-8891-c6e587b1cdb8' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1083.013597] env[68233]: DEBUG nova.compute.manager [req-fff265ea-b819-4d33-9567-c84c34023b71 req-15befe02-ae21-41d6-9c0c-051094ebf4f9 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-deleted-7d72ccc2-ee10-4121-9a73-41bc93e7493e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1083.013798] env[68233]: INFO nova.compute.manager [req-fff265ea-b819-4d33-9567-c84c34023b71 req-15befe02-ae21-41d6-9c0c-051094ebf4f9 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Neutron deleted interface 7d72ccc2-ee10-4121-9a73-41bc93e7493e; detaching it from the instance and deleting it from the info cache [ 1083.013973] env[68233]: DEBUG nova.network.neutron [req-fff265ea-b819-4d33-9567-c84c34023b71 req-15befe02-ae21-41d6-9c0c-051094ebf4f9 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.041581] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a50ac78-b99f-45c1-b2ce-8f647228f717 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.051375] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4462b8-ade8-4b11-ba5a-05773cf57f42 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.089825] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce603e6f-7cc6-48ad-9839-e1f2a415df26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.097569] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b94cd3-9929-494d-bf41-0e66b109e4d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.112592] env[68233]: DEBUG nova.compute.provider_tree [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.131170] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5854} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.131457] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 72c7e272-dd92-40a5-875b-3edfa1ad282b/72c7e272-dd92-40a5-875b-3edfa1ad282b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.131675] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.131922] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f04fd312-1be1-45bc-8d72-248378ada5f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.139018] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1083.139018] env[68233]: value = "task-2783040" [ 1083.139018] env[68233]: _type = "Task" [ 1083.139018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.148437] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.166746] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52832f95-79b3-7f0a-fb9b-f4e25695f8da, 'name': SearchDatastore_Task, 'duration_secs': 0.034626} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.168295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.168295] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1083.168295] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f62659d9-42bd-46da-914e-5d2c95e0ae61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.175344] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1083.175344] env[68233]: value = "task-2783041" [ 1083.175344] env[68233]: _type = "Task" [ 1083.175344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.184407] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783041, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.275325] env[68233]: DEBUG oslo_vmware.api [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783038, 'name': PowerOnVM_Task, 'duration_secs': 0.67191} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.275648] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1083.306592] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2920096e-fcd7-422c-a214-139da4896bb3 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.086s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.376094] env[68233]: DEBUG nova.compute.manager [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1083.377003] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c046799-615a-4faf-bc87-6fbedc6ca843 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.496234] env[68233]: DEBUG nova.network.neutron [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.516714] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b7128fb-74f1-4b4f-8cd4-0a111b0f17d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.531522] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7605381e-cfff-4adc-b99e-8ec93190cc9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.572275] env[68233]: DEBUG nova.compute.manager [req-fff265ea-b819-4d33-9567-c84c34023b71 req-15befe02-ae21-41d6-9c0c-051094ebf4f9 service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Detach interface failed, port_id=7d72ccc2-ee10-4121-9a73-41bc93e7493e, reason: Instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1083.615698] env[68233]: DEBUG nova.scheduler.client.report [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.651090] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.099021} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.651389] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.652244] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6edf9eb-29d4-463b-bec7-b26a88bc037f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.675617] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] 72c7e272-dd92-40a5-875b-3edfa1ad282b/72c7e272-dd92-40a5-875b-3edfa1ad282b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.676066] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-997de179-7d60-4142-80ef-df37ecd144d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.699989] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783041, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.701296] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1083.701296] env[68233]: value = "task-2783042" [ 1083.701296] env[68233]: _type = "Task" [ 1083.701296] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.708858] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.896387] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5ea0a19b-1b95-47a1-a396-94695c0b0dba tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 37.746s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.995883] env[68233]: INFO nova.compute.manager [-] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Took 1.64 seconds to deallocate network for instance. [ 1084.121606] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.821s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.124093] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.931s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.125966] env[68233]: INFO nova.compute.claims [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1084.141861] env[68233]: INFO nova.scheduler.client.report [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Deleted allocations for instance 19cf6f80-ff11-4881-896e-9fc162ded31e [ 1084.188719] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783041, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.885022} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.189808] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1084.190110] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1084.190493] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3967374a-97ef-4bba-9537-c9e47a19eff8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.199927] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1084.199927] env[68233]: value = "task-2783043" [ 1084.199927] env[68233]: _type = "Task" [ 1084.199927] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.212240] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783043, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.216928] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783042, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.504978] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.649433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c7327329-ed67-47b5-8cea-497d148cb27b tempest-ServersNegativeTestMultiTenantJSON-1897797569 tempest-ServersNegativeTestMultiTenantJSON-1897797569-project-member] Lock "19cf6f80-ff11-4881-896e-9fc162ded31e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.123s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.714557] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783043, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104025} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.714699] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783042, 'name': ReconfigVM_Task, 'duration_secs': 0.687988} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.714928] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1084.715254] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Reconfigured VM instance instance-00000068 to attach disk [datastore2] 72c7e272-dd92-40a5-875b-3edfa1ad282b/72c7e272-dd92-40a5-875b-3edfa1ad282b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.716506] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ba57bf-4548-425d-b2b0-dafa7ad37a02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.718852] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1d2ad34-e42a-40ea-b69e-939811ec5419 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.740614] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to attach disk 
[datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1084.742100] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cde9eedb-a734-44e1-81c9-7fa5c74709e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.757265] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1084.757265] env[68233]: value = "task-2783044" [ 1084.757265] env[68233]: _type = "Task" [ 1084.757265] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.762275] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1084.762275] env[68233]: value = "task-2783045" [ 1084.762275] env[68233]: _type = "Task" [ 1084.762275] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.768568] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783044, 'name': Rename_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.773019] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783045, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.874866] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.875229] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.876020] env[68233]: DEBUG nova.compute.manager [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Going to confirm migration 3 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1084.996152] env[68233]: DEBUG nova.compute.manager [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1085.272544] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783044, 'name': Rename_Task, 'duration_secs': 0.421326} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.273971] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.273971] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec84e832-e00b-47ff-ae47-02933c65c501 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.280409] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783045, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.286726] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1085.286726] env[68233]: value = "task-2783046" [ 1085.286726] env[68233]: _type = "Task" [ 1085.286726] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.297385] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783046, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.344083] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82bc139b-0ff1-483e-892b-d88f4e381500 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.354640] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f600af4-6df1-4214-8b60-eb9fac425eef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.400076] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e168d57-40af-4056-a90f-dedca86e8d7b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.410989] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c3dab0-d316-44e1-8cd1-8f52d0b05b0a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.425484] env[68233]: DEBUG nova.compute.provider_tree [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.440544] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.440864] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.440964] env[68233]: DEBUG nova.network.neutron [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.441171] env[68233]: DEBUG nova.objects.instance [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'info_cache' on Instance uuid 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1085.521661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 
tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.774174] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783045, 'name': ReconfigVM_Task, 'duration_secs': 0.645999} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.774231] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to attach disk [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c/35587446-6f3b-465b-a2a6-0b154374734c.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1085.775659] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'encryption_format': None, 'encryption_options': None, 'boot_index': 0, 'encrypted': False, 'device_name': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'image_id': 'da133fda-e1e2-42a1-a7e0-b8b1426a8490'}], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': False, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'}, 'boot_index': None, 'attachment_id': '4dd281bc-de69-4af0-b9a9-a3c1a6dc92a0', 'mount_device': '/dev/sdb', 'disk_bus': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68233) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1085.775659] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1085.775839] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1085.777106] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7244e7da-7a8f-42bb-9784-2750c8933a18 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.796356] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76bc83a3-71f1-4895-881f-f0ab1efccd5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.803469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.803709] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.830683] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to attach disk [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.831022] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783046, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.834995] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b42fb047-adb4-4e7a-b69b-71ad0a8afff8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.857461] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1085.857461] env[68233]: value = "task-2783047" [ 1085.857461] env[68233]: _type = "Task" [ 1085.857461] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.866668] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783047, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.930052] env[68233]: DEBUG nova.scheduler.client.report [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1086.058924] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "ac108b76-385d-40c2-992c-dc7561227130" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.059212] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.302659] env[68233]: DEBUG oslo_vmware.api [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783046, 'name': PowerOnVM_Task, 'duration_secs': 0.607085} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.302943] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.303298] env[68233]: INFO nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Took 7.80 seconds to spawn the instance on the hypervisor. [ 1086.303490] env[68233]: DEBUG nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1086.304314] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa6781b4-2e41-4d67-8326-545010fae9be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.334882] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.369645] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783047, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.435081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.435636] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1086.438929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.637s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.439171] env[68233]: DEBUG nova.objects.instance [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lazy-loading 'resources' on Instance uuid 11ec9800-fa7e-4dbd-bdc1-63d0b496589f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.561197] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1086.688934] env[68233]: DEBUG nova.network.neutron [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [{"id": "cc05db07-a36a-494d-92b6-af58fdd9d143", "address": "fa:16:3e:8c:9c:8b", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc05db07-a3", "ovs_interfaceid": "cc05db07-a36a-494d-92b6-af58fdd9d143", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.821422] env[68233]: INFO nova.compute.manager [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Took 26.78 seconds to build instance. 
[ 1086.868021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.879062] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783047, 'name': ReconfigVM_Task, 'duration_secs': 0.790352} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.879062] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to attach disk [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1086.886203] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07056755-ad5c-4bd6-9456-c7f76f0d4485 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.904733] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1086.904733] env[68233]: value = "task-2783048" [ 1086.904733] env[68233]: _type = "Task" [ 1086.904733] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.914116] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783048, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.945093] env[68233]: DEBUG nova.compute.utils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1086.947402] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1086.947402] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1087.012148] env[68233]: DEBUG nova.policy [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3be589685b874d76b3753c06d4fc0877', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a664e5702b9d44908d10f7e0f75ffce6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1087.084180] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.180579] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9596b37-bd74-447a-8f68-b8219815f9fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.190972] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320cec12-934d-4dfe-9614-1ac914f2fb67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.197540] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-4a388705-7e00-45dc-8891-c6e587b1cdb8" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.197859] env[68233]: DEBUG nova.objects.instance [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'migration_context' on Instance uuid 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1087.233170] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a0ab9d-1393-4382-a312-c89a97f518b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.242132] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5142f6a1-b74d-4988-b162-0003c80e9b01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.258507] env[68233]: DEBUG nova.compute.provider_tree [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 
tempest-ServersTestJSON-367674289-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.323917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ff3b2557-cfd9-4659-8453-5cfcf6ab0b04 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.288s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.415555] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783048, 'name': ReconfigVM_Task, 'duration_secs': 0.275129} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.415980] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1087.416919] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51ebc3af-8245-4ebd-9ce0-c54a2db4f6d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.423320] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1087.423320] env[68233]: value = "task-2783049" [ 1087.423320] env[68233]: _type = "Task" [ 1087.423320] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.433508] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783049, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.453374] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1087.634214] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Successfully created port: 16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1087.703665] env[68233]: DEBUG nova.objects.base [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Object Instance<4a388705-7e00-45dc-8891-c6e587b1cdb8> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1087.703665] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e72ce1f-dd85-4566-9ee1-385aacea7ad9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.722652] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9c74bc4-3af6-4339-b80b-b5e07b1150d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.728575] env[68233]: DEBUG oslo_vmware.api [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1087.728575] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524835c5-ee48-7b05-a8c9-72a168f6f36a" [ 1087.728575] env[68233]: _type = "Task" [ 1087.728575] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.738130] env[68233]: DEBUG oslo_vmware.api [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524835c5-ee48-7b05-a8c9-72a168f6f36a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.762477] env[68233]: DEBUG nova.scheduler.client.report [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.935132] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783049, 'name': Rename_Task, 'duration_secs': 0.33079} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.935132] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.935132] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a1a0d6a-3cdd-40ec-9032-39c4df026fad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.942095] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1087.942095] env[68233]: value = "task-2783050" [ 1087.942095] env[68233]: _type = "Task" [ 1087.942095] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.954194] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783050, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.240560] env[68233]: DEBUG oslo_vmware.api [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524835c5-ee48-7b05-a8c9-72a168f6f36a, 'name': SearchDatastore_Task, 'duration_secs': 0.0127} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.241486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.267840] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.829s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.271070] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.823s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.271070] env[68233]: DEBUG nova.objects.instance [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'resources' on Instance uuid 21cc2aa9-8c88-4aa1-8847-bf7f469ca991 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.290765] env[68233]: INFO nova.scheduler.client.report [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Deleted allocations for instance 11ec9800-fa7e-4dbd-bdc1-63d0b496589f [ 1088.452672] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783050, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.465331] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1088.492433] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1088.492710] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1088.492962] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1088.493233] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1088.493398] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1088.493548] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1088.493764] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1088.493943] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1088.494167] env[68233]: DEBUG nova.virt.hardware [None 
req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1088.494322] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1088.494501] env[68233]: DEBUG nova.virt.hardware [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1088.495382] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d74faf-e1d1-4c7f-ba99-8ae1791d22ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.504666] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6799ac-2845-43c0-8d6c-276a048d0d00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.525553] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.525832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.526028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1088.526217] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.526389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.529206] env[68233]: INFO nova.compute.manager [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Terminating instance [ 1088.798720] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a96161a6-da7c-45ce-9924-969cae8c6054 tempest-ServersTestJSON-367674289 tempest-ServersTestJSON-367674289-project-member] Lock "11ec9800-fa7e-4dbd-bdc1-63d0b496589f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.607s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.953191] env[68233]: DEBUG oslo_vmware.api [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783050, 'name': PowerOnVM_Task, 'duration_secs': 0.727002} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.954271] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.954488] env[68233]: DEBUG nova.compute.manager [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.955289] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57081e70-63b4-4215-b33c-47dfc1486c36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.958256] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c144853d-632b-464a-bf16-10dee910b434 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.966878] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1441eb53-ef08-44d9-a475-844c7ff3f441 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.002047] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcc0a9a-1a98-4b79-a4f3-81fc36241454 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.009633] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdb5b11-0b75-4d42-aeb3-d3b66b26db69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.025159] env[68233]: DEBUG nova.compute.provider_tree [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.033519] env[68233]: DEBUG nova.compute.manager [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1089.033797] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1089.034623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b3f04b-2fe6-469d-a715-41154cdc1c04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.042633] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1089.043047] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67256dcf-12da-4804-966e-4f103c5e1ebf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.050319] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1089.050319] env[68233]: value = "task-2783051" [ 1089.050319] env[68233]: _type = "Task" [ 1089.050319] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.057494] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783051, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.418896] env[68233]: DEBUG nova.compute.manager [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Received event network-vif-plugged-16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1089.418896] env[68233]: DEBUG oslo_concurrency.lockutils [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.419262] env[68233]: DEBUG oslo_concurrency.lockutils [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.419312] env[68233]: DEBUG oslo_concurrency.lockutils [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.419487] env[68233]: DEBUG nova.compute.manager [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] No waiting events found dispatching network-vif-plugged-16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.419654] env[68233]: WARNING nova.compute.manager [req-43327e83-43d9-4549-b594-7c04b28c5de9 req-3aa1836b-7bf5-43a0-a2b5-cf5e7797c867 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Received unexpected event network-vif-plugged-16ec4545-d69d-43bf-a956-54414f895c1e for instance with vm_state building and task_state spawning. 
[ 1089.483228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.528834] env[68233]: DEBUG nova.scheduler.client.report [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1089.559848] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783051, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.908729] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Successfully updated port: 16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.927591] env[68233]: DEBUG nova.compute.manager [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Received event network-changed-16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1089.928023] env[68233]: DEBUG nova.compute.manager [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Refreshing instance network info cache due to event network-changed-16ec4545-d69d-43bf-a956-54414f895c1e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1089.928829] env[68233]: DEBUG oslo_concurrency.lockutils [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] Acquiring lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.928829] env[68233]: DEBUG oslo_concurrency.lockutils [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] Acquired lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.929543] env[68233]: DEBUG nova.network.neutron [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Refreshing network info cache for port 16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1090.034623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.037595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.532s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.037595] env[68233]: DEBUG nova.objects.instance [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'resources' on Instance uuid 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1090.061680] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783051, 'name': PowerOffVM_Task, 'duration_secs': 0.543077} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.062707] env[68233]: INFO nova.scheduler.client.report [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance 21cc2aa9-8c88-4aa1-8847-bf7f469ca991 [ 1090.063755] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.063934] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.066770] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f16bd28d-067f-4605-a706-b40d1c7cffa3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.156862] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1090.157171] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1090.157315] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Deleting the datastore file [datastore2] 72c7e272-dd92-40a5-875b-3edfa1ad282b {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1090.157623] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f292cc0-0c73-4394-b1e8-51610aa0da78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.164645] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for the task: (returnval){ [ 1090.164645] env[68233]: value = "task-2783053" [ 1090.164645] env[68233]: _type = "Task" [ 1090.164645] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.173209] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.411749] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.495159] env[68233]: DEBUG nova.network.neutron [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.572965] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4f112a0f-514f-480f-b3b6-c3a4379578d6 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "21cc2aa9-8c88-4aa1-8847-bf7f469ca991" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.121s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.600511] env[68233]: DEBUG nova.network.neutron [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.676903] env[68233]: DEBUG oslo_vmware.api [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Task: {'id': task-2783053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151166} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.677163] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1090.677349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1090.677526] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1090.677689] env[68233]: INFO nova.compute.manager [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Took 1.64 seconds to destroy the instance on the hypervisor. 
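The recurring 'Lock "compute_resources" acquired ... waited N.NNNs' and '"released" ... held N.NNNs' records come from oslo.concurrency's synchronized decorator, which serialises the resource tracker's claim and usage updates and logs how long each caller waited for and then held the lock. A rough illustration of the pattern follows; update_usage() is an illustrative stand-in, not Nova's actual resource-tracker code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance_uuid):
        # Runs with the "compute_resources" lock held; lockutils itself emits
        # the "acquired by ... waited" and "released by ... held" debug lines
        # around this call, which is where the timings in the log come from.
        pass
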
[ 1090.677939] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.678241] env[68233]: DEBUG nova.compute.manager [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1090.678241] env[68233]: DEBUG nova.network.neutron [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1090.710857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf1077d-27d0-4185-b4d5-4e213aeda43e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.718811] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96183e0-43f8-4cb7-b76b-72caa61f6959 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.751904] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9a62d5-de17-478f-9ee0-f5dcb0285e0a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.758788] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b366047e-2433-4b3a-a151-778032352ff4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.772028] env[68233]: DEBUG nova.compute.provider_tree [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1091.103039] env[68233]: DEBUG oslo_concurrency.lockutils [req-0fc2a70f-029f-402e-84c9-dd255e8fb64b req-9bf1daf5-39cc-4c36-998c-f1b245c6ab4a service nova] Releasing lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.103204] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.103356] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.275968] env[68233]: DEBUG nova.scheduler.client.report [None 
req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1091.377454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.377696] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.456324] env[68233]: DEBUG nova.network.neutron [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.658282] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1091.751742] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.751866] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.781409] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.744s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.783558] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.262s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.807374] env[68233]: INFO nova.scheduler.client.report [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted allocations for instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 [ 1091.815921] env[68233]: DEBUG nova.network.neutron [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [{"id": "16ec4545-d69d-43bf-a956-54414f895c1e", "address": "fa:16:3e:82:05:b4", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ec4545-d6", "ovs_interfaceid": "16ec4545-d69d-43bf-a956-54414f895c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.880019] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1091.959352] env[68233]: INFO nova.compute.manager [-] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Took 1.28 seconds to deallocate network for instance. [ 1092.002703] env[68233]: DEBUG nova.compute.manager [req-7ee941f7-a93a-47b3-a713-b45ada1b748d req-12b62195-3d91-4fb5-a3f4-08e6a8dd911a service nova] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Received event network-vif-deleted-bfd74e6f-d2a1-48db-8918-792fc8175d83 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.254374] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1092.288591] env[68233]: INFO nova.compute.claims [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.315772] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2082f07b-c847-464a-bfab-2f90ae66b103 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.707s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1092.316619] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Acquired lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.317585] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f7d051-08d1-4e70-b4c3-518adffceb9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.320733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.321080] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance network_info: |[{"id": "16ec4545-d69d-43bf-a956-54414f895c1e", "address": "fa:16:3e:82:05:b4", "network": {"id": 
"3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ec4545-d6", "ovs_interfaceid": "16ec4545-d69d-43bf-a956-54414f895c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.321525] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:05:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16ec4545-d69d-43bf-a956-54414f895c1e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.329317] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Creating folder: Project (a664e5702b9d44908d10f7e0f75ffce6). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.330406] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-993de082-db30-4544-a8a8-c42ae5e16e74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.335071] env[68233]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1092.335220] env[68233]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=68233) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1092.335779] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddab7a2e-fdbf-4c18-a5f4-2dcc4c495203 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.341581] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Created folder: Project (a664e5702b9d44908d10f7e0f75ffce6) in parent group-v559223. 
[ 1092.341755] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Creating folder: Instances. Parent ref: group-v559504. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1092.342621] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5154fbbd-f912-451e-988c-1c01c1d67960 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.346587] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455c4f8e-459c-4d63-8258-573ca1f865d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.364185] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Created folder: Instances in parent group-v559504. [ 1092.364413] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.364588] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.364774] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15ae12ec-14b9-4e74-9005-3e20507c9e13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.391167] env[68233]: ERROR root [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-559475' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-559475' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-559475' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-559475'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-559475' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-559475' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-559475'}\n"]: nova.exception.InstanceNotFound: Instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 could not be found. [ 1092.391362] env[68233]: DEBUG oslo_concurrency.lockutils [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] Releasing lock "0bde10dc-6762-49fb-9c0d-6b104a3cfa39" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.391567] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Detach interface failed, port_id=ae386ac0-a953-4d25-b510-beafa8a7c6d8, reason: Instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1092.391754] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Received event network-vif-deleted-44f782f3-8c83-4b99-bb5d-1409aa3a4ddd {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1092.391942] env[68233]: INFO nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Neutron deleted interface 44f782f3-8c83-4b99-bb5d-1409aa3a4ddd; detaching it from the instance and deleting it from the info cache [ 1092.392197] env[68233]: DEBUG nova.network.neutron [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Updating instance_info_cache with network_info: [{"id": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "address": "fa:16:3e:c7:b3:57", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.245", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d72ccc2-ee", "ovs_interfaceid": "7d72ccc2-ee10-4121-9a73-41bc93e7493e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.400012] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1092.400012] env[68233]: value = "task-2783056" [ 1092.400012] env[68233]: _type = "Task" [ 1092.400012] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.406381] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.410570] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783056, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.470090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.774367] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.794408] env[68233]: INFO nova.compute.resource_tracker [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating resource usage from migration 436655a2-141d-496f-bad4-23dbb56d5f94 [ 1092.899114] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74ee2902-e48a-4f7a-8770-17d7b716821b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.907198] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f21478a-4f24-401f-a35e-4380aeb90ea5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.922216] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783056, 'name': CreateVM_Task, 'duration_secs': 0.445516} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.922863] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1092.923558] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.923725] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.924181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1092.924616] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-669de217-c526-46b0-9e87-c8ab53c506d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.929014] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1092.929014] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5210ca84-e403-e4a4-bdd8-3055664609b6" [ 1092.929014] env[68233]: _type = "Task" [ 1092.929014] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.943243] env[68233]: DEBUG nova.compute.manager [req-f0106863-2e67-4dad-ae32-18f055c1f998 req-c9e97f63-a51b-417d-ae0b-323d0a667bae service nova] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Detach interface failed, port_id=44f782f3-8c83-4b99-bb5d-1409aa3a4ddd, reason: Instance 0bde10dc-6762-49fb-9c0d-6b104a3cfa39 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1092.948772] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5210ca84-e403-e4a4-bdd8-3055664609b6, 'name': SearchDatastore_Task, 'duration_secs': 0.01048} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.948881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.949207] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.949338] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.949486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.949661] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.950201] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed7ad66a-2004-416a-ae01-60176876968c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.959750] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.960024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.960630] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c32f965-46be-4d89-9a41-67973511c889 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.965739] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1092.965739] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5242f516-5f3c-2408-f315-cb4d9da90161" [ 1092.965739] env[68233]: _type = "Task" [ 1092.965739] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.979024] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5242f516-5f3c-2408-f315-cb4d9da90161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.996990] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff8c98e-e680-4800-8868-975c3c3c246b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.004020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267f7f98-8da6-491b-bdff-3184057072a8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.035203] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f732ca6-445d-4d01-bd5c-a5519435a5a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.042232] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ce2193-47a4-4063-95c5-6bc4a7c8117d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.055493] env[68233]: DEBUG nova.compute.provider_tree [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1093.477054] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5242f516-5f3c-2408-f315-cb4d9da90161, 'name': SearchDatastore_Task, 'duration_secs': 0.007693} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.477277] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dd14883-da87-4a9a-add5-3a113bfc043e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.482447] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1093.482447] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fe0f0-50d9-c68f-46f9-541613c50ba7" [ 1093.482447] env[68233]: _type = "Task" [ 1093.482447] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.490238] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fe0f0-50d9-c68f-46f9-541613c50ba7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.562581] env[68233]: DEBUG nova.scheduler.client.report [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.992921] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]522fe0f0-50d9-c68f-46f9-541613c50ba7, 'name': SearchDatastore_Task, 'duration_secs': 0.010926} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.993256] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.993509] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c2d04b37-3eae-46cb-a227-b62d36c62a6a/c2d04b37-3eae-46cb-a227-b62d36c62a6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1093.993762] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2777d53a-eda3-4665-997f-9955e8067962 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.000431] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1094.000431] env[68233]: value = "task-2783057" [ 1094.000431] env[68233]: _type = "Task" [ 1094.000431] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.007571] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783057, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.068236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.285s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.068484] env[68233]: INFO nova.compute.manager [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Migrating [ 1094.074711] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.207s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.076179] env[68233]: INFO nova.compute.claims [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1094.313270] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.313528] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.511055] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.441631} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.511339] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] c2d04b37-3eae-46cb-a227-b62d36c62a6a/c2d04b37-3eae-46cb-a227-b62d36c62a6a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.511553] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.511796] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fed76773-df36-4871-b5ba-b9c63e531f4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.517482] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1094.517482] env[68233]: value = "task-2783058" [ 1094.517482] env[68233]: _type = "Task" [ 1094.517482] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.524565] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.588246] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.588431] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.588602] env[68233]: DEBUG nova.network.neutron [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1094.816349] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1095.027762] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.303243} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.028072] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.028860] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8a6115-06e7-40c4-a6d8-fb0e90a3e366 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.051316] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] c2d04b37-3eae-46cb-a227-b62d36c62a6a/c2d04b37-3eae-46cb-a227-b62d36c62a6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.051604] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb6f80b6-6c7b-4715-ab0e-7bae706b58df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.071831] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1095.071831] env[68233]: value = "task-2783059" [ 1095.071831] env[68233]: _type = "Task" [ 1095.071831] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.079781] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783059, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.280357] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd8b825-44cf-4687-b18c-97176fb006d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.288056] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2645f5-3dba-4f88-adaf-c4eab19f038f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.319253] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f03430-da94-44cb-bc48-94dcd01b4c6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.324797] env[68233]: DEBUG nova.network.neutron [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.332020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4743925a-cead-40fd-b823-0d59d9ada21c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.346018] env[68233]: DEBUG nova.compute.provider_tree [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.348071] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.582510] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783059, 'name': ReconfigVM_Task, 'duration_secs': 0.349333} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.582510] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] c2d04b37-3eae-46cb-a227-b62d36c62a6a/c2d04b37-3eae-46cb-a227-b62d36c62a6a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.583070] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dd33634-c6eb-4c8c-a6c4-ebc3e898f54a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.588767] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1095.588767] env[68233]: value = "task-2783060" [ 1095.588767] env[68233]: _type = "Task" [ 1095.588767] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.595801] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783060, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.829417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.850069] env[68233]: DEBUG nova.scheduler.client.report [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1096.098722] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783060, 'name': Rename_Task, 'duration_secs': 0.132936} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.099012] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.099268] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ffb722a-3542-410c-b8b5-f2d6aca2de1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.105382] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1096.105382] env[68233]: value = "task-2783061" [ 1096.105382] env[68233]: _type = "Task" [ 1096.105382] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.112557] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.355027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.355544] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1096.360331] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.274s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.360331] env[68233]: INFO nova.compute.claims [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1096.617105] env[68233]: DEBUG oslo_vmware.api [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783061, 'name': PowerOnVM_Task, 'duration_secs': 0.471955} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.617105] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1096.617105] env[68233]: INFO nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1096.617105] env[68233]: DEBUG nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1096.617731] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2b5948-8453-4729-a927-56c778ca5383 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.863642] env[68233]: DEBUG nova.compute.utils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1096.866035] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1097.140087] env[68233]: INFO nova.compute.manager [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Took 27.96 seconds to build instance. [ 1097.345353] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b6ce1a-83c7-40e8-ba3b-5072a21e3493 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.363172] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1097.368072] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1097.537680] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67ee447-8d1a-4cbf-a440-5116a848b7c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.545791] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaffdb4-04ae-495d-a340-de86e7890e3b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.578712] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b4b488-8021-4495-a6be-609dd409432f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.586861] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88799956-998d-400b-a8a9-bf132c4381a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.600440] env[68233]: DEBUG nova.compute.provider_tree [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.641841] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0913762d-9a73-4213-a51e-a029fc9c0187 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.477s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.868879] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1097.869224] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6919eb0-8650-4fa2-8dd1-09f04daf1d74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.879770] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1097.879770] env[68233]: value = "task-2783062" [ 1097.879770] env[68233]: _type = "Task" [ 1097.879770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.887637] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783062, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.032485] env[68233]: DEBUG nova.compute.manager [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Received event network-changed-16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1098.033233] env[68233]: DEBUG nova.compute.manager [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Refreshing instance network info cache due to event network-changed-16ec4545-d69d-43bf-a956-54414f895c1e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1098.033233] env[68233]: DEBUG oslo_concurrency.lockutils [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] Acquiring lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.033434] env[68233]: DEBUG oslo_concurrency.lockutils [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] Acquired lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.034151] env[68233]: DEBUG nova.network.neutron [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Refreshing network info cache for port 16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.106571] env[68233]: DEBUG nova.scheduler.client.report [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1098.379312] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.390624] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783062, 'name': PowerOffVM_Task, 'duration_secs': 0.22117} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.390887] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1098.391114] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1098.404346] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.404597] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.404716] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.404893] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.405049] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.405201] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.405441] env[68233]: 
DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.405607] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.406063] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.406063] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.406154] env[68233]: DEBUG nova.virt.hardware [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.406866] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59ce60b-ca83-4ba1-9d33-1f27729dac96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.413870] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6914c797-48f7-4f25-a011-1637d41ba434 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.426801] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.432270] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Creating folder: Project (53e31d144ab4481da784df3e34a45cce). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1098.433183] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a02facdc-105f-4b0a-9af4-61a4def71cd4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.442646] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Created folder: Project (53e31d144ab4481da784df3e34a45cce) in parent group-v559223. 
[ 1098.442824] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Creating folder: Instances. Parent ref: group-v559507. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1098.443041] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e24a92e-a7e4-4961-b92b-c3039017eadf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.451239] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Created folder: Instances in parent group-v559507. [ 1098.451455] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.451628] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.451812] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02935fd6-52bf-4a03-937d-54a0483e2ff9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.466974] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.466974] env[68233]: value = "task-2783065" [ 1098.466974] env[68233]: _type = "Task" [ 1098.466974] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.473872] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783065, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.612248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.612835] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1098.615507] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 10.375s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.757413] env[68233]: DEBUG nova.network.neutron [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updated VIF entry in instance network info cache for port 16ec4545-d69d-43bf-a956-54414f895c1e. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.757768] env[68233]: DEBUG nova.network.neutron [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [{"id": "16ec4545-d69d-43bf-a956-54414f895c1e", "address": "fa:16:3e:82:05:b4", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ec4545-d6", "ovs_interfaceid": "16ec4545-d69d-43bf-a956-54414f895c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.897015] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1098.897247] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 
tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1098.897411] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1098.897595] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1098.897738] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1098.897881] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1098.898100] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1098.898263] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1098.898424] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1098.898585] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1098.898753] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1098.903957] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7df4c60c-d9cb-4230-88e8-a26a59dd864e {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.920344] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1098.920344] env[68233]: value = "task-2783066" [ 1098.920344] env[68233]: _type = "Task" [ 1098.920344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.928944] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783066, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.978198] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783065, 'name': CreateVM_Task, 'duration_secs': 0.256084} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.978388] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.978828] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.978991] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.979354] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.979637] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ab7d588-b7ad-4aca-aa17-1beae7e6c6db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.984606] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1098.984606] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5259e582-8cc5-ba02-8b26-2baa26d17599" [ 1098.984606] env[68233]: _type = "Task" [ 1098.984606] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.991628] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5259e582-8cc5-ba02-8b26-2baa26d17599, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.118523] env[68233]: DEBUG nova.compute.utils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1099.122748] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1099.260022] env[68233]: DEBUG oslo_concurrency.lockutils [req-aceef025-03ac-4f23-b7e4-f1a1c41f1bca req-3d147ae0-cf09-46a2-a1d5-665e8d0fecc0 service nova] Releasing lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.302386] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53b288d-c9c4-498c-a6e4-5f1c8ea61333 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.309546] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5e6cd9-b569-4cee-8736-67b20d3db205 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.339097] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6744221-600a-46f7-8ddc-50d99008df98 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.346451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e95d3ba-b012-45f5-924d-2067db164aac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.360754] env[68233]: DEBUG nova.compute.provider_tree [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.430664] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783066, 'name': ReconfigVM_Task, 'duration_secs': 0.216061} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.430966] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1099.494841] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5259e582-8cc5-ba02-8b26-2baa26d17599, 'name': SearchDatastore_Task, 'duration_secs': 0.008077} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.495139] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.495379] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.495613] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.495760] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.495935] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.496213] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c9fd2fb-4604-4267-844d-c245b71303b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.503745] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.503915] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.504724] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-598ea4ab-7a6f-4b38-9f81-0c8a8b07b8b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.509722] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1099.509722] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ba54af-6ab6-c71f-c800-9eedf7aee8c5" [ 1099.509722] env[68233]: _type = "Task" [ 1099.509722] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.516850] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ba54af-6ab6-c71f-c800-9eedf7aee8c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.623744] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1099.864723] env[68233]: DEBUG nova.scheduler.client.report [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.937980] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1099.938230] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1099.938382] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1099.938510] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1099.938673] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1099.938820] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1099.938999] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 
tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1099.939171] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1099.939339] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1099.939499] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1099.939670] env[68233]: DEBUG nova.virt.hardware [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1099.944881] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfiguring VM instance instance-00000067 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1099.945455] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e40a119-2d71-4e0c-8312-f4674d88aa78 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.964254] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1099.964254] env[68233]: value = "task-2783067" [ 1099.964254] env[68233]: _type = "Task" [ 1099.964254] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.971848] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783067, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.019699] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ba54af-6ab6-c71f-c800-9eedf7aee8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.007745} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.020503] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cf7c23a-07a1-45b2-a70b-8d80a77057d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.026191] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1100.026191] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cf84b4-d608-0213-05a0-cd63f1390eab" [ 1100.026191] env[68233]: _type = "Task" [ 1100.026191] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.033928] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cf84b4-d608-0213-05a0-cd63f1390eab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.474554] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783067, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.536249] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cf84b4-d608-0213-05a0-cd63f1390eab, 'name': SearchDatastore_Task, 'duration_secs': 0.033961} completed successfully. 
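Every vCenter interaction in this trace follows the same shape: invoke a *_Task method, receive a task handle, then poll it until it reports success (the wait_for_task / _poll_task lines with "progress is N%"). A minimal sketch of that polling loop; get_task_state is a hypothetical callable standing in for the TaskInfo read that oslo.vmware performs through the PropertyCollector:

```python
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_state, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it finishes.

    get_task_state is assumed to return a (state, progress, error) tuple;
    the real driver reads the task's TaskInfo object instead.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = get_task_state()
        if state == "success":
            return
        if state == "error":
            raise TaskFailed(error)
        # "queued" or "running": report progress and try again, as the
        # "... progress is N%" lines above do.
        print(f"task progress is {progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```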
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.536488] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.536742] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f53dccfc-9d0d-4eea-b94c-8527f707c5c2/f53dccfc-9d0d-4eea-b94c-8527f707c5c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.536975] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e9a78aa-c691-4ae6-88d2-4da955fa01e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.542619] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1100.542619] env[68233]: value = "task-2783068" [ 1100.542619] env[68233]: _type = "Task" [ 1100.542619] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.551080] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.634244] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Start spawning the instance on the hypervisor. 
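The copy above is the image-cache pattern: the cached VMDK under devstack-image-cache_base is guarded by a named lock while it is copied into the instance's own datastore directory. A sketch of the same flow under stated assumptions: the lock name mirrors the ones in the log, and shutil.copyfile stands in for the CopyVirtualDisk_Task RPC the real driver issues:

```python
import os
import shutil
from oslo_concurrency import lockutils

def copy_cached_image(image_id, cache_dir, instance_dir, instance_uuid):
    # Paths and the datastore prefix in the lock name are illustrative.
    src = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    dst = os.path.join(instance_dir, instance_uuid, f"{instance_uuid}.vmdk")
    lock_name = f"[datastore2] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    with lockutils.lock(lock_name):
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copyfile(src, dst)   # real driver: CopyVirtualDisk_Task + wait_for_task
    return dst
```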
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1100.660712] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1100.660997] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1100.661192] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1100.661407] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1100.661590] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1100.661762] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1100.662010] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1100.662208] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1100.662394] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 
tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1100.662634] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1100.662929] env[68233]: DEBUG nova.virt.hardware [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1100.663838] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f70c3a3f-5f84-498d-953d-fbff3619cffb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.672593] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7f8a78-213b-4a76-bafa-093b17ecca4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.687116] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1100.692475] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1100.692706] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1100.692911] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab340cf2-6ab2-4964-9023-b1ebb5b9172d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.708523] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1100.708523] env[68233]: value = "task-2783069" [ 1100.708523] env[68233]: _type = "Task" [ 1100.708523] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.716053] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783069, 'name': CreateVM_Task} progress is 0%. 
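The "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" line comes from oslo.service's RetryDecorator, which re-invokes a callable via a looping call until it stops raising the configured exceptions. A self-contained sketch of that pattern; the exception type is made up for the example, whereas the real driver retries on specific vCenter faults:

```python
from oslo_service import loopingcall

class TransientVCenterError(Exception):
    """Hypothetical stand-in for a retryable vCenter fault."""

attempts = {"n": 0}

@loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                            max_sleep_time=5,
                            exceptions=(TransientVCenterError,))
def create_vm():
    # Fail twice, then succeed, to show the retry behaviour.
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise TransientVCenterError("vCenter busy, try again")
    return "vm-moref"

print(create_vm())   # retries twice, then returns "vm-moref"
```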
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.875911] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.260s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.879020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.396s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.879232] env[68233]: DEBUG nova.objects.instance [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1100.979217] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783067, 'name': ReconfigVM_Task, 'duration_secs': 0.512681} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.979612] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfigured VM instance instance-00000067 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1100.980577] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af3b9c-c630-40e7-96cd-63909d54b9f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.006603] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.006879] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08b09df4-891d-462d-821d-cff0940eafac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.027160] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1101.027160] env[68233]: value = 
"task-2783070" [ 1101.027160] env[68233]: _type = "Task" [ 1101.027160] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.037183] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783070, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.052444] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783068, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.218791] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783069, 'name': CreateVM_Task, 'duration_secs': 0.45766} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.218791] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1101.218791] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.220029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.220029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1101.220029] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3e1d67f-64ab-4007-89b0-71fcfc209111 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.224113] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1101.224113] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1ec96-5f1d-b8d6-c77e-f6e132602317" [ 1101.224113] env[68233]: _type = "Task" [ 1101.224113] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.231724] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1ec96-5f1d-b8d6-c77e-f6e132602317, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.437958] env[68233]: INFO nova.scheduler.client.report [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted allocation for migration 834923b8-fcd6-4c82-9d81-2d5cf1fa91e5 [ 1101.538617] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783070, 'name': ReconfigVM_Task, 'duration_secs': 0.298191} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.538884] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929/7025be4e-b800-42c8-a2c0-3ea059d3b929.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1101.539142] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1101.553038] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519251} completed successfully. 
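"Deleted allocation for migration 834923b8-..." marks the confirm-resize cleanup: the allocation held under the migration record's UUID is removed from Placement. A sketch of the underlying REST call; the endpoint and token are placeholders, and Nova goes through its scheduler report client rather than raw requests:

```python
import requests

PLACEMENT = "http://placement.example.test/placement"   # assumed endpoint
HEADERS = {"X-Auth-Token": "<keystone token>",           # assumed auth
           "OpenStack-API-Version": "placement 1.28"}

def delete_allocation(consumer_uuid):
    # Placement replies 204 when the consumer's allocation is removed.
    resp = requests.delete(f"{PLACEMENT}/allocations/{consumer_uuid}",
                           headers=HEADERS)
    resp.raise_for_status()
```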
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.553038] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f53dccfc-9d0d-4eea-b94c-8527f707c5c2/f53dccfc-9d0d-4eea-b94c-8527f707c5c2.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.553038] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.553038] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa08ca10-8a71-4a23-b916-a9b7b4734dff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.559582] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1101.559582] env[68233]: value = "task-2783071" [ 1101.559582] env[68233]: _type = "Task" [ 1101.559582] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.567520] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.735032] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a1ec96-5f1d-b8d6-c77e-f6e132602317, 'name': SearchDatastore_Task, 'duration_secs': 0.011621} completed successfully. 
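"Extending root virtual disk to 1048576" is the flavor's root_gb (1 GiB for m1.nano) expressed in KB, the unit the extend-disk task works in, and the extend only happens when the copied image is smaller than the requested size. A small helper showing that conversion and only-grow check; extend_fn is a placeholder for the actual vCenter call:

```python
def maybe_extend_root_disk(root_gb, current_size_kb, extend_fn):
    requested_kb = root_gb * 1024 * 1024          # GiB -> KB
    if current_size_kb < requested_kb:
        extend_fn(requested_kb)                   # e.g. ExtendVirtualDisk_Task
    return requested_kb

# A ~20 MB sparse image grown to the 1 GiB root disk seen in the log.
assert maybe_extend_root_disk(1, 21318656 // 1024, print) == 1048576
```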
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.735032] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.735178] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1101.735392] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1101.735530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1101.735704] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1101.735959] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8a05be6-f136-4e51-9bd1-659c14fb078d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.753642] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1101.753817] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Folder [datastore2] devstack-image-cache_base created. 
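The MakeDirectory call above is issued unconditionally, and an "already exists" fault is treated as success, which keeps the image-cache folder creation idempotent across concurrent spawns. A local-filesystem sketch of the same idea; the real code catches the vCenter file-already-exists fault rather than OSError:

```python
import errno
import os

def create_folder_if_missing(path):
    try:
        os.makedirs(path)
        print(f"Created directory with path {path}")
    except OSError as exc:
        if exc.errno != errno.EEXIST:
            raise
        print(f"Folder {path} already exists")
```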
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1101.754581] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-147752eb-757a-4af3-8177-aaf3783caaed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.759870] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1101.759870] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d9f3d-782b-1b81-16bc-c03994a2c825" [ 1101.759870] env[68233]: _type = "Task" [ 1101.759870] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.767450] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d9f3d-782b-1b81-16bc-c03994a2c825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.890726] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ea76a8b5-e3ce-4dec-9adc-b31dbaa73778 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.891854] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.486s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.893838] env[68233]: INFO nova.compute.claims [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.944397] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c09859f1-9d5f-4230-9c7c-9909e2ce9936 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 17.069s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.045981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5248b176-d73d-4f12-8d1b-1313e6bf1495 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.067618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d116b11c-ecc8-4ac4-856d-aba79d9ed175 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.087924] env[68233]: DEBUG oslo_vmware.api [None 
req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079171} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.088466] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1102.091591] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.092503] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc99d41-d74f-4233-bfa0-3284ddd87ed7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.111315] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] f53dccfc-9d0d-4eea-b94c-8527f707c5c2/f53dccfc-9d0d-4eea-b94c-8527f707c5c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.111547] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3490d66b-7abb-4240-8a88-3bd64c6719bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.130665] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1102.130665] env[68233]: value = "task-2783072" [ 1102.130665] env[68233]: _type = "Task" [ 1102.130665] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.138266] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783072, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.269823] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523d9f3d-782b-1b81-16bc-c03994a2c825, 'name': SearchDatastore_Task, 'duration_secs': 0.054907} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.270631] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-956246fb-bdd2-4c3f-b371-1d7de1c9eb34 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.275628] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1102.275628] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224c879-1290-6273-7bfe-3a63a366d02d" [ 1102.275628] env[68233]: _type = "Task" [ 1102.275628] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.282818] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224c879-1290-6273-7bfe-3a63a366d02d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.292316] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.292538] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.292733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.292914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1102.293096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.295250] env[68233]: INFO nova.compute.manager [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Terminating instance [ 1102.626658] env[68233]: DEBUG nova.network.neutron [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Port 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1102.642522] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.785595] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224c879-1290-6273-7bfe-3a63a366d02d, 'name': SearchDatastore_Task, 'duration_secs': 0.042559} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.785863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.786139] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1102.786396] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a1f7790-d6a6-46e4-925c-64675befbdec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.793617] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1102.793617] env[68233]: value = "task-2783073" [ 1102.793617] env[68233]: _type = "Task" [ 1102.793617] env[68233]: } to complete. 
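The terminate path above serializes on two named locks: the instance UUID, so only one lifecycle operation runs at a time, and "<uuid>-events", held briefly while pending external events are cleared. A minimal sketch of that locking pattern with oslo.concurrency; the function bodies are placeholders:

```python
from oslo_concurrency import lockutils

def clear_events_for_instance(instance_uuid):
    with lockutils.lock(f"{instance_uuid}-events"):
        print("events cleared")           # placeholder for dropping queued events

def do_terminate_instance(instance_uuid):
    with lockutils.lock(instance_uuid):
        clear_events_for_instance(instance_uuid)
        print("Terminating instance")     # placeholder for shutdown + cleanup

do_terminate_instance("4a388705-7e00-45dc-8891-c6e587b1cdb8")
```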
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.798475] env[68233]: DEBUG nova.compute.manager [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1102.798678] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1102.802145] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad1f71f-03f3-4ed4-9ddf-4582bfaf353c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.804614] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783073, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.808728] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.808958] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b047347a-737d-4552-bcb3-77df7605eb86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.814252] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1102.814252] env[68233]: value = "task-2783074" [ 1102.814252] env[68233]: _type = "Task" [ 1102.814252] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.822144] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783074, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.090981] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429b0ab0-d4ae-4af6-9a1d-143437ad550e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.099671] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d35db2-cc40-415a-b972-9f6820f2fbf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.136603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7cfba4-f889-4465-990d-d09be997b797 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.151027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-861c9198-aa70-457f-b5cc-45a3a67cb3fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.155273] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783072, 'name': ReconfigVM_Task, 'duration_secs': 0.675107} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.155761] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Reconfigured VM instance instance-0000006a to attach disk [datastore2] f53dccfc-9d0d-4eea-b94c-8527f707c5c2/f53dccfc-9d0d-4eea-b94c-8527f707c5c2.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.156686] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccb0ee47-7584-4ed7-a21c-8761afd76423 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.168079] env[68233]: DEBUG nova.compute.provider_tree [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.171108] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1103.171108] env[68233]: value = "task-2783075" [ 1103.171108] env[68233]: _type = "Task" [ 1103.171108] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.179787] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783075, 'name': Rename_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.303785] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783073, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450526} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.304058] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1103.304323] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1103.304565] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2d66adab-099d-4ee1-8c4b-23d17d663031 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.311864] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1103.311864] env[68233]: value = "task-2783076" [ 1103.311864] env[68233]: _type = "Task" [ 1103.311864] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.322757] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.325596] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783074, 'name': PowerOffVM_Task, 'duration_secs': 0.188055} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.325835] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.325996] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1103.326280] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92d208aa-5a8a-4209-b90a-57913d19b49c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.389970] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1103.390235] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1103.390429] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleting the datastore file [datastore2] 4a388705-7e00-45dc-8891-c6e587b1cdb8 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.390700] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ddbed70e-f7e3-4be6-8e71-d5c1e38cad04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.397317] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1103.397317] env[68233]: value = "task-2783078" [ 1103.397317] env[68233]: _type = "Task" [ 1103.397317] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.408464] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783078, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.654546] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.654855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.654953] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.686088] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783075, 'name': Rename_Task, 'duration_secs': 0.166625} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.686088] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.686088] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e031511b-3990-4d9b-bc7c-7e371df3d9ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.689836] env[68233]: ERROR nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [req-5653d739-8fb1-48ed-8565-319f53583f51] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5653d739-8fb1-48ed-8565-319f53583f51"}]} [ 1103.693769] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1103.693769] env[68233]: value = "task-2783079" [ 1103.693769] env[68233]: _type = "Task" [ 1103.693769] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.701385] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783079, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.709129] env[68233]: DEBUG nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1103.724111] env[68233]: DEBUG nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1103.724382] env[68233]: DEBUG nova.compute.provider_tree [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.738626] env[68233]: DEBUG nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1103.757904] env[68233]: DEBUG nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 
tempest-ServerShowV254Test-1291780269-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1103.823943] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062865} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.823943] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1103.824511] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09399cd0-5610-448d-96e0-e97b2089096c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.844396] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1103.847058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03a070e3-31fb-4ade-be80-488a28a12f99 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.869018] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1103.869018] env[68233]: value = "task-2783080" [ 1103.869018] env[68233]: _type = "Task" [ 1103.869018] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.879535] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.908629] env[68233]: DEBUG oslo_vmware.api [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783078, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149074} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.908885] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1103.909079] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1103.909408] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1103.909443] env[68233]: INFO nova.compute.manager [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1103.909666] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1103.912150] env[68233]: DEBUG nova.compute.manager [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1103.912258] env[68233]: DEBUG nova.network.neutron [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1103.959917] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c157ee4a-b866-4eac-9a50-24d0cbc583ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.967959] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b465d2c7-4339-41a5-9732-a963fb3ea3be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.998036] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d08224-488b-44e6-a4eb-e881e0b66f07 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.005857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3fcb8d-6b39-40e3-9fb7-40ebd2264ed1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.021822] env[68233]: DEBUG nova.compute.provider_tree [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.204802] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783079, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.384757] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783080, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.559456] env[68233]: DEBUG nova.scheduler.client.report [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1104.559764] env[68233]: DEBUG nova.compute.provider_tree [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 144 to 145 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1104.559952] env[68233]: DEBUG nova.compute.provider_tree [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.698378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.698378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.698378] env[68233]: DEBUG nova.network.neutron [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.709731] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783079, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.721602] env[68233]: DEBUG nova.compute.manager [req-f4f06e6f-6d9d-4bbb-84c5-0ae3d533c2d0 req-d7c9a78e-3ce3-4ba6-bfca-c6bc0d431e0f service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Received event network-vif-deleted-cc05db07-a36a-494d-92b6-af58fdd9d143 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1104.721810] env[68233]: INFO nova.compute.manager [req-f4f06e6f-6d9d-4bbb-84c5-0ae3d533c2d0 req-d7c9a78e-3ce3-4ba6-bfca-c6bc0d431e0f service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Neutron deleted interface cc05db07-a36a-494d-92b6-af58fdd9d143; detaching it from the instance and deleting it from the info cache [ 1104.721978] env[68233]: DEBUG nova.network.neutron [req-f4f06e6f-6d9d-4bbb-84c5-0ae3d533c2d0 req-d7c9a78e-3ce3-4ba6-bfca-c6bc0d431e0f service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.879765] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783080, 'name': ReconfigVM_Task, 'duration_secs': 0.682065} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.880067] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Reconfigured VM instance instance-0000006b to attach disk [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.880738] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2aba145e-ca1c-4c24-bd23-b93cb0bd3716 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.888999] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1104.888999] env[68233]: value = "task-2783081" [ 1104.888999] env[68233]: _type = "Task" [ 1104.888999] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.898646] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783081, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.066879] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.175s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.067434] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1105.070104] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.601s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1105.070323] env[68233]: DEBUG nova.objects.instance [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lazy-loading 'resources' on Instance uuid 72c7e272-dd92-40a5-875b-3edfa1ad282b {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1105.205363] env[68233]: DEBUG nova.network.neutron [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.212098] env[68233]: DEBUG oslo_vmware.api [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783079, 'name': PowerOnVM_Task, 'duration_secs': 1.106925} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.213166] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.213166] env[68233]: INFO nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Took 6.83 seconds to spawn the instance on the hypervisor. 
[ 1105.213166] env[68233]: DEBUG nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.214769] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc742e1-847d-42b6-8da6-02d175cdec5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.224865] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa5aa5d7-479b-49f6-b29e-c764747e17b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.235756] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bcd8f38-f882-4dfa-be25-37219e350325 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.268355] env[68233]: DEBUG nova.compute.manager [req-f4f06e6f-6d9d-4bbb-84c5-0ae3d533c2d0 req-d7c9a78e-3ce3-4ba6-bfca-c6bc0d431e0f service nova] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Detach interface failed, port_id=cc05db07-a36a-494d-92b6-af58fdd9d143, reason: Instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1105.401056] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783081, 'name': Rename_Task, 'duration_secs': 0.156428} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.401056] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.401056] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2a10004-cb50-4dc1-b688-1beab6b38cac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.406254] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1105.406254] env[68233]: value = "task-2783082" [ 1105.406254] env[68233]: _type = "Task" [ 1105.406254] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.413300] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783082, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.422866] env[68233]: DEBUG nova.network.neutron [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.572996] env[68233]: DEBUG nova.compute.utils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1105.577787] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1105.710760] env[68233]: INFO nova.compute.manager [-] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Took 1.80 seconds to deallocate network for instance. [ 1105.740335] env[68233]: INFO nova.compute.manager [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Took 18.90 seconds to build instance. 
[ 1105.752029] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd4a864-3019-436a-9def-9a588439d246 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.760974] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10d6208-73a8-4836-bc45-6a46150548c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.795872] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45ff96-1d1d-4fc0-995a-1a5686cef8b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.803264] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9887280f-a96c-4e39-b6e8-2e4f1bf0ccf4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.817334] env[68233]: DEBUG nova.compute.provider_tree [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.915897] env[68233]: DEBUG oslo_vmware.api [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783082, 'name': PowerOnVM_Task, 'duration_secs': 0.4729} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.916169] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.916397] env[68233]: INFO nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Took 5.28 seconds to spawn the instance on the hypervisor. 
[ 1105.916590] env[68233]: DEBUG nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.917335] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3606f3-cbb4-4975-b5e3-410f184bc83f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.925309] env[68233]: DEBUG oslo_concurrency.lockutils [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.078479] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1106.218475] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.242738] env[68233]: DEBUG oslo_concurrency.lockutils [None req-cd2d274b-4f43-493c-9a45-1c018d27f90e tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.439s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.320954] env[68233]: DEBUG nova.scheduler.client.report [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.440701] env[68233]: INFO nova.compute.manager [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Took 19.37 seconds to build instance. 
[ 1106.450307] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59058ee4-b18f-44e0-bf97-3ef38c1f7da8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.469452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e12cead-8546-4bb4-b7af-16bf45a355e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.475708] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1106.826107] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.756s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.829063] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.054s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.830821] env[68233]: INFO nova.compute.claims [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.848154] env[68233]: INFO nova.scheduler.client.report [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Deleted allocations for instance 72c7e272-dd92-40a5-875b-3edfa1ad282b [ 1106.942881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60b96fba-8ac3-45e4-92a3-3c25cc10aaf6 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 20.884s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.981864] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1106.982258] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6c428be0-6b3d-4930-a19d-1a4a58672d86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.995642] env[68233]: DEBUG oslo_vmware.api 
[None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1106.995642] env[68233]: value = "task-2783083" [ 1106.995642] env[68233]: _type = "Task" [ 1106.995642] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.004941] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783083, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.088656] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1107.109131] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1107.109435] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1107.109604] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1107.109784] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1107.109923] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1107.110084] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 
tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1107.110306] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1107.110491] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1107.110681] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1107.110855] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1107.111067] env[68233]: DEBUG nova.virt.hardware [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1107.111940] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49ff704-fd6b-45fd-b77b-7136b7344408 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.119524] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98dc6ba6-010a-490a-b019-8605782b96e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.133456] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.138897] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Creating folder: Project (4bd1a52a960042bca4a0b3f685a86dee). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1107.139483] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a12b176-51b4-4815-93e6-df483240da39 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.149166] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Created folder: Project (4bd1a52a960042bca4a0b3f685a86dee) in parent group-v559223. [ 1107.149363] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Creating folder: Instances. Parent ref: group-v559511. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1107.149592] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-103a656b-9a0f-4e8e-9a0b-90e815d7ddf7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.157366] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Created folder: Instances in parent group-v559511. [ 1107.157588] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1107.157770] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1107.157968] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b283b285-d8b8-440b-b6d0-81fde4f0f6ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.173508] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.173508] env[68233]: value = "task-2783086" [ 1107.173508] env[68233]: _type = "Task" [ 1107.173508] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.180596] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783086, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.356115] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e8cedc6-adaf-4c18-837e-4d56085901e6 tempest-ServerTagsTestJSON-926069465 tempest-ServerTagsTestJSON-926069465-project-member] Lock "72c7e272-dd92-40a5-875b-3edfa1ad282b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.830s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1107.496386] env[68233]: INFO nova.compute.manager [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Rebuilding instance [ 1107.508051] env[68233]: DEBUG oslo_vmware.api [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783083, 'name': PowerOnVM_Task, 'duration_secs': 0.434838} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.508321] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1107.508506] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-53333a96-9933-4e1b-bc08-92d366f5fa39 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance '7025be4e-b800-42c8-a2c0-3ea059d3b929' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1107.542166] env[68233]: DEBUG nova.compute.manager [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1107.543071] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a20a389-0b94-4907-9359-b6bcfb9c636a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.683390] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783086, 'name': CreateVM_Task, 'duration_secs': 0.250815} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.683578] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1107.684018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.684194] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.684556] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1107.684836] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c323245b-a495-4518-a509-7c09030c2de2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.689321] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1107.689321] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c6c51-2397-f78e-78e1-44d11b5387a9" [ 1107.689321] env[68233]: _type = "Task" [ 1107.689321] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.696877] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c6c51-2397-f78e-78e1-44d11b5387a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.998219] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde44522-77a7-4c67-ac97-85f262953379 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.005566] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3624b44d-f6b7-4c18-be23-aa22e5b9cce8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.040675] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b363642d-6719-40f5-bd87-56a8415af15a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.048011] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c86eded-3bb2-4c7e-84e1-4c99ba8e528c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.066291] env[68233]: DEBUG nova.compute.provider_tree [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1108.202630] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528c6c51-2397-f78e-78e1-44d11b5387a9, 'name': SearchDatastore_Task, 'duration_secs': 0.02135} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.202920] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.206022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.206022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.206022] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.206022] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1108.206022] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-abac7071-decd-4451-9c21-8c476dc6e007 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.213854] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1108.214346] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1108.215607] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea985cd8-7d25-4f1c-ad7b-a47042f71fe0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.222261] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1108.222261] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52734b40-252e-b35d-767e-00be816773aa" [ 1108.222261] env[68233]: _type = "Task" [ 1108.222261] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.228910] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52734b40-252e-b35d-767e-00be816773aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.555704] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.556080] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-433ec3cd-7ced-4ff9-b56b-45201b3fc10a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.564199] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1108.564199] env[68233]: value = "task-2783087" [ 1108.564199] env[68233]: _type = "Task" [ 1108.564199] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.568487] env[68233]: DEBUG nova.scheduler.client.report [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.575812] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783087, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.733597] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52734b40-252e-b35d-767e-00be816773aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010336} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.734419] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-037e5c67-980f-472a-aaff-23fee18cf33e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.741630] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1108.741630] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6ac98-b9c3-cb0d-a099-93c133ecc47e" [ 1108.741630] env[68233]: _type = "Task" [ 1108.741630] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.749545] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6ac98-b9c3-cb0d-a099-93c133ecc47e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.075835] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783087, 'name': PowerOffVM_Task, 'duration_secs': 0.129182} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.076369] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1109.076906] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1109.077468] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.249s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1109.077941] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1109.080839] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5cc9d02-fec3-4a48-9434-af23b7d807c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.083946] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.736s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.085705] env[68233]: INFO nova.compute.claims [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1109.092732] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1109.092967] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ca5f553-e33e-4e36-a3ad-bdc28256977b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.121485] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Unregistered the VM {{(pid=68233) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1109.121632] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1109.121825] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleting the datastore file [datastore2] ac108b76-385d-40c2-992c-dc7561227130 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1109.122100] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42c4b805-9603-4258-bd5d-596efa1fdf67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.128739] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1109.128739] env[68233]: value = "task-2783089" [ 1109.128739] env[68233]: _type = "Task" [ 1109.128739] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.138454] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.252288] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d6ac98-b9c3-cb0d-a099-93c133ecc47e, 'name': SearchDatastore_Task, 'duration_secs': 0.02279} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.252579] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.252841] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1109.253112] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f13e27c0-48aa-44fe-9663-1249d7f522c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.259038] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1109.259038] env[68233]: value = "task-2783090" [ 1109.259038] env[68233]: _type = "Task" [ 1109.259038] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.267035] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783090, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.585791] env[68233]: DEBUG nova.compute.utils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.587780] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.587948] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.642558] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783089, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16138} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.642847] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.643143] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.643443] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.647643] env[68233]: DEBUG nova.policy [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.769715] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783090, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.934750] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.937935] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1109.937935] env[68233]: DEBUG nova.compute.manager [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Going to confirm migration 4 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1109.983032] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Successfully created port: f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.091268] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1110.271922] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783090, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616409} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.272196] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1110.272414] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1110.272822] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-035ad168-4db1-46b7-8f06-3ea013ea1f7e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.280023] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1110.280023] env[68233]: value = "task-2783091" [ 1110.280023] env[68233]: _type = "Task" [ 1110.280023] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.287539] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783091, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.291985] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b78ca85-0fe8-4328-8909-f950d3c9f4af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.299857] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae8b824-49df-4799-be9a-7e305668c258 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.330662] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35532d9e-540e-42b8-81a2-c7170f9b99ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.338539] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2c82b7-40c3-47f5-9269-724bad33ab82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.353494] env[68233]: DEBUG nova.compute.provider_tree [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1110.374138] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.374404] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.502503] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.502690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" 
{{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.502867] env[68233]: DEBUG nova.network.neutron [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.503068] env[68233]: DEBUG nova.objects.instance [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'info_cache' on Instance uuid 7025be4e-b800-42c8-a2c0-3ea059d3b929 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1110.682278] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=<?>,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:47:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.682576] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.682743] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.682927] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.683124] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.683380] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.683678] env[68233]: DEBUG nova.virt.hardware [None 
req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.683916] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.684193] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.684460] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.684673] env[68233]: DEBUG nova.virt.hardware [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.685565] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7661aedf-2ebd-47d3-8d45-e43bcf11171f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.693614] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7b79b63-c570-4515-95b9-65095c1e6936 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.707777] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1110.713421] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1110.713614] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1110.713812] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-22ba9515-694b-4361-a569-1524ff7fd65e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.729540] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1110.729540] env[68233]: value = "task-2783092" [ 1110.729540] env[68233]: _type = "Task" [ 1110.729540] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.738344] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783092, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.788965] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783091, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061992} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.789245] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1110.790015] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e138a89-6b5a-4d44-8e9e-23d3f04043ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.810034] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1110.810287] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7028b972-cda0-4693-8d01-74bd4871fed4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.832301] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1110.832301] env[68233]: value = "task-2783093" [ 1110.832301] env[68233]: _type = "Task" [ 1110.832301] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.840560] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.875043] env[68233]: ERROR nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [req-a8458625-3641-4d82-8231-db4eb7d13140] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a8458625-3641-4d82-8231-db4eb7d13140"}]} [ 1110.877405] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1110.894039] env[68233]: DEBUG nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1110.908585] env[68233]: DEBUG nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1110.908925] env[68233]: DEBUG nova.compute.provider_tree [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1110.921327] env[68233]: DEBUG nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1110.940337] env[68233]: DEBUG nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1111.104064] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1111.106495] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8766ad9-ba1e-4f07-9e88-6eb08ab12676 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.116053] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd25cc1-6b1b-4501-b126-e1f3f0b6a382 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.146761] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdbea82-9cc3-4d0f-a991-480b18e15110 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.152047] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=<?>,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-06T03:47:25Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1111.152288] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1111.152445] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1111.152626] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1111.152774] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1111.152920] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1111.153140] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1111.153341] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1111.153571] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1111.153754] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1111.153923] env[68233]: DEBUG nova.virt.hardware [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1111.154740] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4ae330-33fa-4846-9d9c-fa966bfb689c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.160452] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e26722-0759-4259-a04c-ff0154aec1c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.167148] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f243a3-5752-44ed-9d7f-65f76363a7a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.178710] env[68233]: DEBUG nova.compute.provider_tree [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.239472] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783092, 'name': CreateVM_Task, 'duration_secs': 0.469195} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.239644] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1111.240069] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.240236] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.240562] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1111.240814] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-559cc3f8-c96e-4927-9510-9b9795e8621f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.245123] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1111.245123] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1599b-ec2d-70a0-c388-31ac98da795c" [ 1111.245123] env[68233]: _type = 
"Task" [ 1111.245123] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.252512] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1599b-ec2d-70a0-c388-31ac98da795c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.343742] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783093, 'name': ReconfigVM_Task, 'duration_secs': 0.268999} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.344013] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1111.344709] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37a21ec6-657a-41cf-a171-5cbf7823ae3a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.351085] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1111.351085] env[68233]: value = "task-2783094" [ 1111.351085] env[68233]: _type = "Task" [ 1111.351085] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.358781] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783094, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.376814] env[68233]: DEBUG nova.compute.manager [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Received event network-vif-plugged-f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1111.377108] env[68233]: DEBUG oslo_concurrency.lockutils [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] Acquiring lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.377384] env[68233]: DEBUG oslo_concurrency.lockutils [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.377637] env[68233]: DEBUG oslo_concurrency.lockutils [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.377811] env[68233]: DEBUG nova.compute.manager [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] No waiting events found dispatching network-vif-plugged-f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.377977] env[68233]: WARNING nova.compute.manager [req-b8f5e121-b9f1-47da-b5d7-2c1483b13a36 req-64facb82-c941-42f0-a131-2fbe184ccf87 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Received unexpected event network-vif-plugged-f23451b7-3ec0-4c70-93bd-a52f260a2dc1 for instance with vm_state building and task_state spawning. 
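The six records above trace Neutron's network-vif-plugged notification for port f23451b7-3ec0-4c70-93bd-a52f260a2dc1 arriving while instance 0d79ccd0-d24d-4200-9d34-f3a7f44370aa is still spawning: the handler pops the event under the per-instance "-events" lock, finds no registered waiter, and records it as unexpected. The short Python sketch below is an illustrative aside, not part of the log, showing that register/pop coordination pattern in isolation; the class and method names (InstanceEventSketch, prepare_for_event, deliver) are hypothetical stand-ins, not Nova's actual API.

import threading

# Hypothetical sketch, not Nova code: the register/pop pattern behind the
# "-events" lock records above.
class InstanceEventSketch:
    def __init__(self):
        self._lock = threading.Lock()   # plays the role of the per-instance "-events" lock
        self._waiters = {}              # event name -> threading.Event

    def prepare_for_event(self, name):
        # Called by code that intends to wait for the event later.
        done = threading.Event()
        with self._lock:
            self._waiters[name] = done
        return done

    def pop_event(self, name):
        # Called when the external notification arrives; the pop happens under the lock.
        with self._lock:
            return self._waiters.pop(name, None)

    def deliver(self, name):
        done = self.pop_event(name)
        if done is None:
            # Mirrors "No waiting events found" / "Received unexpected event".
            print(f"WARNING: unexpected event {name}")
        else:
            done.set()                  # unblocks the registered waiter

if __name__ == "__main__":
    events = InstanceEventSketch()
    events.deliver("network-vif-plugged-f23451b7")             # arrives early: warning
    waiter = events.prepare_for_event("network-vif-plugged-f23451b7")
    events.deliver("network-vif-plugged-f23451b7")              # normal path
    print("plugged:", waiter.wait(timeout=1.0))                 # True
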
[ 1111.398199] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.500076] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Successfully updated port: f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.711942] env[68233]: DEBUG nova.scheduler.client.report [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 146 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1111.712300] env[68233]: DEBUG nova.compute.provider_tree [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 146 to 147 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1111.712501] env[68233]: DEBUG nova.compute.provider_tree [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1111.755490] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c1599b-ec2d-70a0-c388-31ac98da795c, 'name': SearchDatastore_Task, 'duration_secs': 0.028817} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.756202] env[68233]: DEBUG nova.network.neutron [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [{"id": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "address": "fa:16:3e:26:3b:ea", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f0efdc2-cf", "ovs_interfaceid": "4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.760098] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.760098] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1111.760098] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.760098] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.760098] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Creating 
directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.760098] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a457e0fd-65d0-45af-9161-977ac72b263b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.776711] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.776904] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.777645] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bb6a32b-9709-4aee-b272-eb577d46252c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.783231] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1111.783231] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f8499-ff2f-c868-5450-f4e7c70d41d0" [ 1111.783231] env[68233]: _type = "Task" [ 1111.783231] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.790425] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f8499-ff2f-c868-5450-f4e7c70d41d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.861214] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783094, 'name': Rename_Task, 'duration_secs': 0.128779} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.861511] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.861762] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81c53ff3-452f-4013-a65b-08c398bf3e97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.868138] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1111.868138] env[68233]: value = "task-2783095" [ 1111.868138] env[68233]: _type = "Task" [ 1111.868138] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.875578] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.002650] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.002845] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.003263] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.218354] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.134s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.218904] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1112.221788] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.003s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.221988] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.224614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.826s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.226072] env[68233]: INFO nova.compute.claims [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1112.246847] env[68233]: INFO nova.scheduler.client.report [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted allocations for instance 4a388705-7e00-45dc-8891-c6e587b1cdb8 [ 1112.259822] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-7025be4e-b800-42c8-a2c0-3ea059d3b929" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.260203] env[68233]: DEBUG nova.objects.instance [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'migration_context' on Instance uuid 7025be4e-b800-42c8-a2c0-3ea059d3b929 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1112.295328] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]525f8499-ff2f-c868-5450-f4e7c70d41d0, 'name': SearchDatastore_Task, 'duration_secs': 0.033855} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.295998] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61bc3222-945b-45ed-a837-a749d8491890 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.301058] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1112.301058] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff6610-61b8-8d12-dcb5-1eae1b323673" [ 1112.301058] env[68233]: _type = "Task" [ 1112.301058] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.309450] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff6610-61b8-8d12-dcb5-1eae1b323673, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.379870] env[68233]: DEBUG oslo_vmware.api [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783095, 'name': PowerOnVM_Task, 'duration_secs': 0.452463} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.380176] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.380401] env[68233]: INFO nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Took 5.29 seconds to spawn the instance on the hypervisor. [ 1112.380626] env[68233]: DEBUG nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.381509] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c54c633-cf76-4746-8208-f30b16c2e6fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.541318] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.672163] env[68233]: DEBUG nova.network.neutron [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Updating instance_info_cache with network_info: [{"id": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "address": "fa:16:3e:0a:80:45", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf23451b7-3e", "ovs_interfaceid": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.732097] env[68233]: DEBUG nova.compute.utils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.735346] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1112.735530] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1112.754654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-34f8483d-e1eb-4b5d-a292-c742e1f24777 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "4a388705-7e00-45dc-8891-c6e587b1cdb8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.462s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1112.762952] env[68233]: DEBUG nova.objects.base [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Object Instance<7025be4e-b800-42c8-a2c0-3ea059d3b929> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1112.764059] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e18f9a-fa90-40f2-ac25-5aa7972d8fb0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.785958] env[68233]: DEBUG nova.policy [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1112.787574] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f9ebd0c-27f8-4c3b-bfd2-2ff720954d0f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.793682] env[68233]: DEBUG oslo_vmware.api [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1112.793682] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d5d2ea-8777-d765-c162-7d8c299f5e40" [ 1112.793682] env[68233]: _type = "Task" [ 1112.793682] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.802927] env[68233]: DEBUG oslo_vmware.api [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d5d2ea-8777-d765-c162-7d8c299f5e40, 'name': SearchDatastore_Task, 'duration_secs': 0.007125} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.805889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.810927] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ff6610-61b8-8d12-dcb5-1eae1b323673, 'name': SearchDatastore_Task, 'duration_secs': 0.016021} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.811181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.811465] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.811697] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-588d8805-bb57-41ae-a0c9-73ae44a15b87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.818505] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1112.818505] env[68233]: value = "task-2783096" [ 1112.818505] env[68233]: _type = "Task" [ 1112.818505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.826057] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783096, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.902742] env[68233]: INFO nova.compute.manager [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Took 20.51 seconds to build instance. 
[ 1113.175422] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.175855] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Instance network_info: |[{"id": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "address": "fa:16:3e:0a:80:45", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf23451b7-3e", "ovs_interfaceid": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.176378] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:80:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f23451b7-3ec0-4c70-93bd-a52f260a2dc1', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.186639] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.187288] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.188098] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-851714b7-59c4-4db1-8b88-f611aa94c863 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.209758] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.209758] env[68233]: value = "task-2783097" [ 1113.209758] env[68233]: _type = "Task" [ 1113.209758] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.218971] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783097, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.239143] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1113.332050] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783096, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495624} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.332875] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Successfully created port: 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1113.334930] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.335202] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.337776] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-842cb5b7-14a9-425c-a4d3-4197fe4b4920 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.345292] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1113.345292] env[68233]: value = "task-2783098" [ 1113.345292] env[68233]: _type = "Task" [ 1113.345292] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.362237] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783098, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.405096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2a59e720-b00c-4f87-9ed6-0c406a216ce8 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.027s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.407640] env[68233]: DEBUG nova.compute.manager [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Received event network-changed-f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1113.407856] env[68233]: DEBUG nova.compute.manager [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Refreshing instance network info cache due to event network-changed-f23451b7-3ec0-4c70-93bd-a52f260a2dc1. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1113.408067] env[68233]: DEBUG oslo_concurrency.lockutils [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] Acquiring lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.408214] env[68233]: DEBUG oslo_concurrency.lockutils [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] Acquired lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.408384] env[68233]: DEBUG nova.network.neutron [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Refreshing network info cache for port f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.457505] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aa5307-2442-4989-8aca-f78ecbe9320b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.464970] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d257671-89b1-4e4f-ad6f-5fd480996ec5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.501218] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7287de10-1bda-4c76-bd7e-0d93463c0dfd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.509351] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae586a1-b4da-475d-b4ee-264419626993 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.522363] env[68233]: DEBUG nova.compute.provider_tree [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a 
tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1113.719602] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783097, 'name': CreateVM_Task, 'duration_secs': 0.381157} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.719781] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.720484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.720647] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.720954] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1113.721212] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d009ec5-8430-4930-9e64-5fc6bcdc4e0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.725667] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1113.725667] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200c089-6b40-a3b1-426d-917016e6c813" [ 1113.725667] env[68233]: _type = "Task" [ 1113.725667] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.732991] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200c089-6b40-a3b1-426d-917016e6c813, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.859112] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783098, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065759} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.859391] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.860745] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53779917-b263-4009-827a-6ffbc8156ca4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.882145] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.882956] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4732cd7-95b4-4bcb-a7f8-1aec8a36f25c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.901024] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.901024] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.907233] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1113.907233] env[68233]: value = "task-2783099" [ 1113.907233] env[68233]: _type = "Task" [ 1113.907233] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.916768] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.918980] env[68233]: INFO nova.compute.manager [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Rebuilding instance [ 1113.970894] env[68233]: DEBUG nova.compute.manager [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1113.971949] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b1042e-8a7e-4c21-81f1-a490a042de5a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.046582] env[68233]: ERROR nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [req-2bbbe7a0-0477-48fb-9b52-34c074692ae2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2bbbe7a0-0477-48fb-9b52-34c074692ae2"}]} [ 1114.062383] env[68233]: DEBUG nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1114.076797] env[68233]: DEBUG nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1114.077051] env[68233]: DEBUG nova.compute.provider_tree [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.088836] env[68233]: DEBUG nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1114.107358] env[68233]: DEBUG nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1114.122749] env[68233]: DEBUG nova.network.neutron [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Updated VIF entry in instance network info cache for port f23451b7-3ec0-4c70-93bd-a52f260a2dc1. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.123130] env[68233]: DEBUG nova.network.neutron [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Updating instance_info_cache with network_info: [{"id": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "address": "fa:16:3e:0a:80:45", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf23451b7-3e", "ovs_interfaceid": "f23451b7-3ec0-4c70-93bd-a52f260a2dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.237666] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5200c089-6b40-a3b1-426d-917016e6c813, 'name': SearchDatastore_Task, 'duration_secs': 0.073754} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.240239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.240487] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.240726] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.240869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.241118] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.241719] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29bd5030-f81c-430c-bbca-c623d01b9f40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.252732] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1114.256848] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.257048] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.257749] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67908ed1-f3ee-481a-b90c-b08cf5352ab0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.264048] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6daabfac-a141-406c-be9c-725a49ee5cdc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.269309] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1114.269309] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e67967-1083-7689-8b88-57f914f0d390" [ 1114.269309] env[68233]: _type = "Task" [ 1114.269309] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.276392] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1114.276621] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1114.277090] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1114.277090] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1114.277090] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1114.277256] env[68233]: DEBUG nova.virt.hardware 
[None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1114.277455] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1114.277597] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1114.277762] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1114.277920] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1114.278103] env[68233]: DEBUG nova.virt.hardware [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1114.279416] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-408740f1-561a-4fbc-9525-db1d20d5fb2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.282892] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c708d6cd-6429-4c47-be5d-9bc0ed85fb6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.288455] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e67967-1083-7689-8b88-57f914f0d390, 'name': SearchDatastore_Task, 'duration_secs': 0.018501} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.290029] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10c27df-387e-40f1-90a7-21286d3a1cbe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.319651] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03d5c7d-9b0f-4a78-b7d4-2631e12c9d7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.322855] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44cf640-5a2d-4e4b-a6b5-00404aa7c6fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.328043] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1114.328043] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbcb-bbd7-79f8-70fb-4cbbb8695321" [ 1114.328043] env[68233]: _type = "Task" [ 1114.328043] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.341963] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f90dd0-d0ab-4561-a23f-7d68ff077b5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.348314] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbcb-bbd7-79f8-70fb-4cbbb8695321, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.357793] env[68233]: DEBUG nova.compute.provider_tree [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.403063] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1114.417893] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783099, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.627751] env[68233]: DEBUG oslo_concurrency.lockutils [req-b46dce7e-f467-4d7b-a4ca-92e6cb8c61bd req-c9dfdabf-4829-4613-840d-310e2493ebb8 service nova] Releasing lock "refresh_cache-0d79ccd0-d24d-4200-9d34-f3a7f44370aa" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.841604] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5269fbcb-bbd7-79f8-70fb-4cbbb8695321, 'name': SearchDatastore_Task, 'duration_secs': 0.031526} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.841604] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.841604] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0d79ccd0-d24d-4200-9d34-f3a7f44370aa/0d79ccd0-d24d-4200-9d34-f3a7f44370aa.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1114.841604] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-492919c6-06d3-4743-9d22-9716eae4b9e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.846116] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1114.846116] env[68233]: value = "task-2783100" [ 1114.846116] env[68233]: _type = "Task" [ 1114.846116] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.855154] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783100, 'name': CopyVirtualDisk_Task} progress is 0%. 
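Annotation: the "Acquiring lock ... / Lock ... acquired ... waited / ... released ... held" lines throughout this trace are emitted by oslo.concurrency's lockutils wrappers around the guarded sections (image-cache paths, refresh_cache-<uuid>, compute_resources). A rough sketch of that pattern, not the actual Nova call sites; the lock names and the placeholder function below are illustrative:

    from oslo_concurrency import lockutils

    def _do_guarded_work():
        # Placeholder for whatever runs while the lock is held.
        pass

    # Context-manager form, matching the "Acquiring lock ... / Releasing lock ..."
    # pairs in the log (lock name is illustrative).
    with lockutils.lock("refresh_cache-<instance-uuid>"):
        _do_guarded_work()

    # Decorator form; lockutils logs the "acquired ... waited Ns" and
    # "released ... held Ns" debug lines around every call.
    @lockutils.synchronized("compute_resources")
    def claim_resources():
        _do_guarded_work()

    claim_resources()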
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.898958] env[68233]: DEBUG nova.scheduler.client.report [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 148 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1114.899362] env[68233]: DEBUG nova.compute.provider_tree [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 148 to 149 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1114.899630] env[68233]: DEBUG nova.compute.provider_tree [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1114.920180] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783099, 'name': ReconfigVM_Task, 'duration_secs': 0.879895} completed successfully. 
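Annotation: the set_inventory_for_provider / generation 148 to 149 lines above correspond to a conditional PUT against the Placement API, where the request must carry the generation Placement last returned or it is rejected with a conflict. A minimal sketch of that call; the endpoint URL, token handling, and microversion header are assumptions, while the provider UUID and inventory values are copied from the log:

    import requests

    PLACEMENT = "http://placement.example/placement"   # assumption: service endpoint
    TOKEN = "<keystone-token>"                          # assumption: auth obtained elsewhere
    RP_UUID = "51aa13e7-0977-4031-b209-4ae90c83752c"    # provider UUID from the log

    payload = {
        # Must match the generation Placement last reported (148 here).
        "resource_provider_generation": 148,
        "inventories": {
            "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
                     "step_size": 1, "allocation_ratio": 4.0},
            "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                          "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
            "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 175,
                        "step_size": 1, "allocation_ratio": 1.0},
        },
    }

    resp = requests.put(
        f"{PLACEMENT}/resource_providers/{RP_UUID}/inventories",
        json=payload,
        headers={"X-Auth-Token": TOKEN,
                 "OpenStack-API-Version": "placement 1.26"},  # assumption: microversion
    )
    resp.raise_for_status()  # on success the response carries the bumped generation (149)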
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.921177] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Reconfigured VM instance instance-0000006b to attach disk [datastore2] ac108b76-385d-40c2-992c-dc7561227130/ac108b76-385d-40c2-992c-dc7561227130.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.921689] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d5518c11-83b6-4459-9260-b923c0c0ae7a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.926232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.927971] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1114.927971] env[68233]: value = "task-2783101" [ 1114.927971] env[68233]: _type = "Task" [ 1114.927971] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.938448] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783101, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.959129] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Successfully updated port: 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.986169] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1114.986512] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e98e42d-787b-407e-b30d-585a58b11e77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.993728] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1114.993728] env[68233]: value = "task-2783102" [ 1114.993728] env[68233]: _type = "Task" [ 1114.993728] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.002774] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783102, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.355432] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783100, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495498} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.355794] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 0d79ccd0-d24d-4200-9d34-f3a7f44370aa/0d79ccd0-d24d-4200-9d34-f3a7f44370aa.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1115.355932] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1115.356203] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7cbd12e-a9cd-455f-969d-8b601afb3307 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.361845] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1115.361845] env[68233]: value = "task-2783103" [ 1115.361845] env[68233]: _type = "Task" [ 1115.361845] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.369616] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783103, 'name': ExtendVirtualDisk_Task} progress is 0%. 
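Annotation: the CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above follow oslo.vmware's task pattern: each "Invoking ..." line is an invoke_api call that returns a task moref, and the "Task: {...} progress is N% ... completed successfully" lines come from wait_for_task polling it. A rough sketch of that pattern, assuming an existing oslo_vmware.api.VMwareAPISession as session, the VirtualDiskManager moref as disk_mgr, and datastore paths like those in the log; the helper name and argument list are illustrative:

    def copy_and_extend_root_disk(session, disk_mgr, dc_ref, src_path, dst_path, size_kb):
        # "Invoking VirtualDiskManager.CopyVirtualDisk_Task ..." returns a task moref.
        task = session.invoke_api(session.vim, "CopyVirtualDisk_Task", disk_mgr,
                                  sourceName=src_path, sourceDatacenter=dc_ref,
                                  destName=dst_path, destDatacenter=dc_ref)
        # Drives the "CopyVirtualDisk_Task ... progress is N%" polling seen above.
        session.wait_for_task(task)

        # Same pattern for "Extending root virtual disk" (ExtendVirtualDisk_Task);
        # 1048576 KB in the log corresponds to the flavor's 1 GB root disk.
        task = session.invoke_api(session.vim, "ExtendVirtualDisk_Task", disk_mgr,
                                  name=dst_path, datacenter=dc_ref,
                                  newCapacityKb=size_kb, eagerZero=False)
        session.wait_for_task(task)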
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.405627] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.181s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.406321] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1115.409141] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.603s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.435381] env[68233]: DEBUG nova.compute.manager [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-vif-plugged-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1115.435665] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.435896] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.436085] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.436261] env[68233]: DEBUG nova.compute.manager [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] No waiting events found dispatching network-vif-plugged-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.436428] env[68233]: WARNING nova.compute.manager [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received unexpected event 
network-vif-plugged-1d10db7a-f783-4b60-b20a-834d68367b3c for instance with vm_state building and task_state spawning. [ 1115.436587] env[68233]: DEBUG nova.compute.manager [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1115.436739] env[68233]: DEBUG nova.compute.manager [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1115.436919] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.437102] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.437291] env[68233]: DEBUG nova.network.neutron [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.442187] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783101, 'name': Rename_Task, 'duration_secs': 0.342609} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.442620] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1115.442860] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b133785a-cede-476d-a976-7f0c31e99270 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.449802] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1115.449802] env[68233]: value = "task-2783104" [ 1115.449802] env[68233]: _type = "Task" [ 1115.449802] env[68233]: } to complete. 
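Annotation: the "No waiting events found" and "Received unexpected event" lines above mean Neutron delivered network-vif-plugged before the compute manager had registered a waiter for it, which is why the event is only logged and the network info cache is refreshed on the follow-up network-changed event. Drivers normally arm that waiter through the virtapi context manager; a hedged sketch of the usual shape, with the deadline, error handling, and the placeholder work function below being assumptions rather than Nova's exact code:

    def _plug_vifs_and_create_vm(instance, network_info):
        # Placeholder for the driver work done while the waiter is armed.
        pass

    def spawn_with_vif_wait(virtapi, instance, network_info, deadline=300):
        # One ('network-vif-plugged', <port-id>) pair per VIF being wired up.
        events = [("network-vif-plugged", vif["id"]) for vif in network_info]

        # While this context is open, incoming external events are matched against
        # the registered list instead of tripping the "unexpected event" warning.
        with virtapi.wait_for_instance_event(instance, events, deadline=deadline):
            _plug_vifs_and_create_vm(instance, network_info)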
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.458207] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.459725] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.502655] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783102, 'name': PowerOffVM_Task, 'duration_secs': 0.351014} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.502919] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1115.503174] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1115.503960] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabf00e3-f249-4972-8ae4-6a73286b48e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.510926] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1115.511164] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b59bcc56-2783-4e67-bf26-eee27a8608af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.536022] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1115.536244] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
1115.536428] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Deleting the datastore file [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1115.536684] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ded6650b-613c-4485-be7c-0ba3ba3811ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.543940] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1115.543940] env[68233]: value = "task-2783106" [ 1115.543940] env[68233]: _type = "Task" [ 1115.543940] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.551274] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783106, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.873425] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783103, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066226} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.873795] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1115.874905] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84c51e0-8cca-4ed6-bc9f-d26b9317525d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.899493] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 0d79ccd0-d24d-4200-9d34-f3a7f44370aa/0d79ccd0-d24d-4200-9d34-f3a7f44370aa.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1115.899779] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a4f9677-2a04-46e0-b775-3816b5e53a71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.914795] env[68233]: DEBUG nova.compute.utils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name 
/opt/stack/nova/nova/compute/utils.py:239}} [ 1115.919264] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Not allocating networking since 'none' was specified. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1115.923634] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1115.923634] env[68233]: value = "task-2783107" [ 1115.923634] env[68233]: _type = "Task" [ 1115.923634] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.932797] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783107, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.959412] env[68233]: DEBUG oslo_vmware.api [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783104, 'name': PowerOnVM_Task, 'duration_secs': 0.459658} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.961882] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.962108] env[68233]: DEBUG nova.compute.manager [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.963384] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3adea362-1fcc-4171-8f70-b9d2fa996614 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.976898] env[68233]: DEBUG nova.network.neutron [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.049608] env[68233]: DEBUG nova.network.neutron [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.055637] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.23061} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.058013] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1116.058136] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1116.058320] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1116.094164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063abad2-d73a-4d1f-8887-24b330fde1f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.101333] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316f2d1d-377d-4660-a04d-01c03d628ec8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.131334] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d47309d-7bb7-4cc8-abcd-5b027ae5a5ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.138260] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af5f05f-a0db-40e7-a357-fbc1fb4ad988 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.152669] env[68233]: DEBUG nova.compute.provider_tree [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.419906] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 
550a52f6-e11b-4d34-ad82-9b39d33780d7] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1116.433862] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783107, 'name': ReconfigVM_Task, 'duration_secs': 0.325807} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.434235] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 0d79ccd0-d24d-4200-9d34-f3a7f44370aa/0d79ccd0-d24d-4200-9d34-f3a7f44370aa.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.434938] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-270191a9-7d03-47cb-bf44-c7c1262d1455 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.441289] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1116.441289] env[68233]: value = "task-2783108" [ 1116.441289] env[68233]: _type = "Task" [ 1116.441289] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.449058] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783108, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.481064] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.552633] env[68233]: DEBUG oslo_concurrency.lockutils [req-46e5b756-5ddb-4900-92e8-1380fa63970b req-637bd65e-0a08-4553-9cee-e0776a31fc6b service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.553039] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.553256] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.656287] env[68233]: DEBUG nova.scheduler.client.report [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.951354] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783108, 'name': Rename_Task, 'duration_secs': 0.134647} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.951620] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1116.951859] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e47d0a15-0afe-42de-93f9-a0966fe7da85 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.958560] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1116.958560] env[68233]: value = "task-2783109" [ 1116.958560] env[68233]: _type = "Task" [ 1116.958560] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.966239] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783109, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.085287] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1117.095670] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.095953] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.096134] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.096322] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.096467] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.096615] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.096823] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.096980] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.097167] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 
tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.097331] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.097503] env[68233]: DEBUG nova.virt.hardware [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.098393] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4091175b-fbc1-46b3-afa7-a1f2284a2910 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.107582] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cb95ebb-a14f-4d03-bbf7-6e88e3a38c96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.121806] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.127962] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.130544] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.130818] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-705beb6e-54c7-4354-a7db-989fbacbcd5f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.147772] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.147772] env[68233]: value = "task-2783110" [ 1117.147772] env[68233]: _type = "Task" [ 1117.147772] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.156825] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783110, 'name': CreateVM_Task} progress is 0%. 
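Annotation: the topology lines above reduce to a small search: flavor and image express no preference or limit (0 means unconstrained), the effective maxima are 65536 each, and the only way to factor 1 vCPU is 1 socket x 1 core x 1 thread, hence the single VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified stand-in for that enumeration, not the actual implementation in nova/virt/hardware.py:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product is exactly vcpus."""
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    yield (sockets, cores, threads)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], the single topology logged above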
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.237176] env[68233]: DEBUG nova.network.neutron [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.403324] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "ac108b76-385d-40c2-992c-dc7561227130" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.403537] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.403845] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "ac108b76-385d-40c2-992c-dc7561227130-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.404064] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.404248] env[68233]: DEBUG oslo_concurrency.lockutils 
[None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.407026] env[68233]: INFO nova.compute.manager [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Terminating instance [ 1117.429030] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1117.448710] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1117.448840] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1117.448976] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1117.449182] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1117.449329] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1117.449478] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1117.449686] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1117.449843] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1117.450056] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1117.450253] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1117.450429] env[68233]: DEBUG nova.virt.hardware [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1117.451368] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eb7a2d-02fd-4c60-9fa8-5a0769daa827 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.459618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9082f311-9e09-471b-a680-3438b94655bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.475319] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.480820] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Creating folder: Project (d4d6a8f99a0c489aa386970daa202c53). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1117.483974] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d7bf16b8-8339-459e-9da8-180520103ca2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.485485] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.494670] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Created folder: Project (d4d6a8f99a0c489aa386970daa202c53) in parent group-v559223. [ 1117.494845] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Creating folder: Instances. Parent ref: group-v559517. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1117.495092] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0ccd51c-0cab-4655-8bdf-73015397cd6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.503580] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Created folder: Instances in parent group-v559517. [ 1117.503798] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.503973] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.504174] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-33b9879d-57fa-4299-8b08-99788cd7d5c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.519016] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.519016] env[68233]: value = "task-2783113" [ 1117.519016] env[68233]: _type = "Task" [ 1117.519016] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.525768] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783113, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.661250] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783110, 'name': CreateVM_Task} progress is 99%. 
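Annotation: the folder and VM creation entries above use the same invoke_api / wait_for_task pattern; CreateFolder is a synchronous call on the parent Folder managed object, while CreateVM_Task is asynchronous and produces the "CreateVM_Task progress is N%" polling lines. A rough sketch, assuming an oslo_vmware.api.VMwareAPISession as session and leaving the config spec and resource-pool lookup out as assumptions:

    def create_vm(session, parent_folder, vm_config_spec, res_pool):
        # "Invoking Folder.CreateFolder ..." is synchronous and returns the new folder moref.
        instances_folder = session.invoke_api(session.vim, "CreateFolder",
                                              parent_folder, name="Instances")

        # "Invoking Folder.CreateVM_Task ..." is asynchronous; wait_for_task drives the
        # progress polling until the task completes and the VM moref is available.
        task = session.invoke_api(session.vim, "CreateVM_Task", instances_folder,
                                  config=vm_config_spec, pool=res_pool)
        task_info = session.wait_for_task(task)
        return task_info.result   # moref of the newly created VM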
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.667033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.257s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.669224] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.743s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.670787] env[68233]: INFO nova.compute.claims [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1117.740014] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.740527] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance network_info: |[{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.741081] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:83:8c:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d10db7a-f783-4b60-b20a-834d68367b3c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.749574] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.750140] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.750427] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1499ea44-dab0-4196-8976-dd6e5ac573ef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.770465] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.770465] env[68233]: value = "task-2783114" [ 1117.770465] env[68233]: _type = "Task" [ 1117.770465] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.777646] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783114, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.911521] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "refresh_cache-ac108b76-385d-40c2-992c-dc7561227130" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.911658] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "refresh_cache-ac108b76-385d-40c2-992c-dc7561227130" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.911842] env[68233]: DEBUG nova.network.neutron [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1117.972724] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783109, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.028458] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783113, 'name': CreateVM_Task, 'duration_secs': 0.249641} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.028655] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.029135] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.029324] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.029659] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.029930] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e60b075-c39d-4c02-9aed-c61253a53c37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.035081] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1118.035081] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52913c02-68c4-979a-e5fd-a4ede5ceecb6" [ 1118.035081] env[68233]: _type = "Task" [ 1118.035081] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.045700] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52913c02-68c4-979a-e5fd-a4ede5ceecb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.158360] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783110, 'name': CreateVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.231879] env[68233]: INFO nova.scheduler.client.report [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocation for migration 436655a2-141d-496f-bad4-23dbb56d5f94 [ 1118.281067] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783114, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.432828] env[68233]: DEBUG nova.network.neutron [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1118.472953] env[68233]: DEBUG oslo_vmware.api [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783109, 'name': PowerOnVM_Task, 'duration_secs': 1.078206} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.473247] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.473473] env[68233]: INFO nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Took 7.37 seconds to spawn the instance on the hypervisor. [ 1118.473664] env[68233]: DEBUG nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.474476] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b841fcb2-3a7c-404f-9bcc-655707de2901 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.512279] env[68233]: DEBUG nova.network.neutron [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.547650] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52913c02-68c4-979a-e5fd-a4ede5ceecb6, 'name': SearchDatastore_Task, 'duration_secs': 0.01391} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.548049] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.548251] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.548463] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.548685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.548783] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.549048] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-67ef96d0-b354-4224-94b7-a84b4c303929 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.562368] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.562532] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.563244] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58c46c90-7837-405d-9784-9c38f86e001a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.569094] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1118.569094] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526c0a51-6a17-bde9-fcfb-a75620444990" [ 1118.569094] env[68233]: _type = "Task" [ 1118.569094] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.576677] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526c0a51-6a17-bde9-fcfb-a75620444990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.660197] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783110, 'name': CreateVM_Task, 'duration_secs': 1.251361} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.660431] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.660941] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.661181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.661548] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1118.661811] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56b062d0-9f41-458b-9b93-a67be65f30bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.666369] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1118.666369] env[68233]: value = 
"session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52874625-93c6-3ece-977f-83a98cd202a6" [ 1118.666369] env[68233]: _type = "Task" [ 1118.666369] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.673925] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52874625-93c6-3ece-977f-83a98cd202a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.737887] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fa9037ca-9395-449a-a7ed-a2863687f614 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.803s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.784794] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783114, 'name': CreateVM_Task, 'duration_secs': 0.907672} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.784794] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1118.785587] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.855064] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cab013-cab6-4e4d-826d-2f7598c02e8e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.863669] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1d9f66-6c93-4354-875d-623d08b351a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.897590] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b84e88-3b25-43a6-9264-1abdf9fb6d50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.905581] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b20311-4d82-4ffd-b927-37f875463434 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.918549] env[68233]: DEBUG nova.compute.provider_tree [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1118.993878] env[68233]: INFO 
nova.compute.manager [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Took 26.23 seconds to build instance. [ 1119.014847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "refresh_cache-ac108b76-385d-40c2-992c-dc7561227130" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.015315] env[68233]: DEBUG nova.compute.manager [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1119.015500] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1119.016706] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7649bb33-764c-4fc2-9ddf-ceddbf8633f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.024065] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1119.024747] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1398e0de-6c6a-4725-bd37-78062eab92d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.030870] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1119.030870] env[68233]: value = "task-2783115" [ 1119.030870] env[68233]: _type = "Task" [ 1119.030870] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.039339] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.083024] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526c0a51-6a17-bde9-fcfb-a75620444990, 'name': SearchDatastore_Task, 'duration_secs': 0.008643} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.083024] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8ff42ee-a89d-4551-bee3-905f152601d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.087901] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1119.087901] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527cd4fc-901c-914f-4c34-8e4944f9eb4c" [ 1119.087901] env[68233]: _type = "Task" [ 1119.087901] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.097686] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527cd4fc-901c-914f-4c34-8e4944f9eb4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.116130] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.116130] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.116130] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.116130] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.117452] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.117452] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.117452] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.117452] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1119.117452] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.177930] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52874625-93c6-3ece-977f-83a98cd202a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.177930] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.178098] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.178263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.178517] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.178902] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1119.179192] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50d9c8f0-b67c-466f-94d9-62c98f4acf37 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.184616] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1119.184616] env[68233]: value = 
"session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e18451-36fb-8bde-de48-0eea19bade11" [ 1119.184616] env[68233]: _type = "Task" [ 1119.184616] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.192872] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e18451-36fb-8bde-de48-0eea19bade11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.421757] env[68233]: DEBUG nova.scheduler.client.report [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1119.496398] env[68233]: DEBUG oslo_concurrency.lockutils [None req-92a99ae4-b825-4d3e-9f63-5f527c04e58e tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.744s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.541122] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783115, 'name': PowerOffVM_Task, 'duration_secs': 0.238271} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.542026] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1119.542026] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1119.542026] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b79237f-309c-41b9-89e3-e7fb535f0ea2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.565073] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1119.565312] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1119.565495] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleting the datastore file [datastore2] ac108b76-385d-40c2-992c-dc7561227130 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1119.566133] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1690f8f9-bcaf-4077-95c8-6978de947b53 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.573305] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1119.573305] env[68233]: value = "task-2783117" [ 1119.573305] env[68233]: _type = "Task" [ 1119.573305] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.580733] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783117, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.597134] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527cd4fc-901c-914f-4c34-8e4944f9eb4c, 'name': SearchDatastore_Task, 'duration_secs': 0.010863} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.597388] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.597638] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.597925] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.598127] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1119.598335] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb68d9f7-ded0-4b14-ba25-cce8e82203ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.600260] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0e352917-e3fa-44c3-9e29-235384a21671 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.605689] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1119.605689] env[68233]: value = "task-2783118" [ 1119.605689] env[68233]: _type = "Task" [ 1119.605689] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.609436] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1119.609630] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1119.610608] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2caf310f-8c3b-44fa-90f1-942fbea96505 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.616043] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783118, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.620023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.620360] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1119.620360] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520278f3-f50c-e9ec-c225-0bf041cdd0cb" [ 1119.620360] env[68233]: _type = "Task" [ 1119.620360] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.627944] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520278f3-f50c-e9ec-c225-0bf041cdd0cb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.696581] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e18451-36fb-8bde-de48-0eea19bade11, 'name': SearchDatastore_Task, 'duration_secs': 0.011174} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.697417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.697417] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1119.697417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1119.794878] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d9ee7d-4801-49b0-9e15-290f0bed8b00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.803266] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Suspending the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1119.803610] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-88ed8cdb-4a93-4853-8a40-af79a7c436ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.811780] env[68233]: DEBUG oslo_vmware.api [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1119.811780] env[68233]: value = "task-2783119" [ 1119.811780] env[68233]: _type = "Task" [ 1119.811780] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.821640] env[68233]: DEBUG oslo_vmware.api [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783119, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.910484] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1119.910855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" acquired by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.927481] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.929147] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1119.933508] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 3.452s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1119.933508] env[68233]: DEBUG nova.objects.instance [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1120.084761] env[68233]: DEBUG oslo_vmware.api [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783117, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.102467} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.085400] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1120.085595] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1120.085779] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1120.085945] env[68233]: INFO nova.compute.manager [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: ac108b76-385d-40c2-992c-dc7561227130] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1120.086206] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1120.086394] env[68233]: DEBUG nova.compute.manager [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1120.086488] env[68233]: DEBUG nova.network.neutron [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1120.110258] env[68233]: DEBUG nova.network.neutron [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1120.117008] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461004} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.117286] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.117504] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.117990] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0901b82-a46e-4da1-8cf6-363f8fecb610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.125284] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1120.125284] env[68233]: value = "task-2783120" [ 1120.125284] env[68233]: _type = "Task" [ 1120.125284] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.132669] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520278f3-f50c-e9ec-c225-0bf041cdd0cb, 'name': SearchDatastore_Task, 'duration_secs': 0.018109} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.133862] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bb34d56-fa59-4016-8f35-05fb62e2a7ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.139278] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.142781] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1120.142781] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f5e693-fc41-4685-fcbc-00c7cf41c996" [ 1120.142781] env[68233]: _type = "Task" [ 1120.142781] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.151579] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f5e693-fc41-4685-fcbc-00c7cf41c996, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.323206] env[68233]: DEBUG oslo_vmware.api [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783119, 'name': SuspendVM_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.327837] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.328119] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.328288] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.328471] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.328644] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.330843] env[68233]: INFO nova.compute.manager [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Terminating instance [ 1120.414317] env[68233]: INFO 
nova.compute.manager [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Detaching volume 134bd8db-5ff7-4467-9f8a-c0e2aa619b91 [ 1120.440452] env[68233]: DEBUG nova.compute.utils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1120.444673] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1120.444860] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1120.451718] env[68233]: INFO nova.virt.block_device [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Attempting to driver detach volume 134bd8db-5ff7-4467-9f8a-c0e2aa619b91 from mountpoint /dev/sdb [ 1120.451967] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1120.452177] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559490', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'name': 'volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '62cd066c-5eac-4f07-bf4e-9275fedc7384', 'attached_at': '', 'detached_at': '', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'serial': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1120.453152] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53740fb8-8d4f-4a05-9ffb-d07644637d04 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.477929] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76501e77-cf4e-48c6-a8c7-4dbd58bea89c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.485320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c9fe0a-4820-4fea-a8b4-9bdd3c0be136 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.507673] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ab574c-7fc1-40f4-88fa-5a7f08615fc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.523857] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] The volume has not been displaced from its original location: [datastore2] volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91/volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1120.529402] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfiguring VM instance instance-00000054 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1120.531240] env[68233]: DEBUG nova.policy [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95122ece8b8b445aa04349a675f262b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc7604c87d6485097fe5658d68217b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1120.532848] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2c1d417-2883-4a28-88ac-fdba6aa35065 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.552274] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1120.552274] env[68233]: value = "task-2783121" [ 1120.552274] env[68233]: _type = "Task" [ 1120.552274] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.561244] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783121, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.613062] env[68233]: DEBUG nova.network.neutron [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1120.635664] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116475} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.635984] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1120.636826] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5712c10-0304-4b1d-a782-4ea260fd236b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.658378] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1120.662457] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50507b8b-2675-43e9-9c9e-91fc4fee3979 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.683491] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52f5e693-fc41-4685-fcbc-00c7cf41c996, 'name': SearchDatastore_Task, 'duration_secs': 0.019883} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.685050] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1120.685332] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1120.685676] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1120.685676] env[68233]: value = "task-2783122" [ 1120.685676] env[68233]: _type = "Task" [ 1120.685676] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.685919] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1120.686126] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1120.686353] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-369867ea-129b-4122-b9a1-a31ae91f5f10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.688476] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b347b357-7db1-4ca3-b7eb-1caa38792f32 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.698598] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1120.698598] env[68233]: value = "task-2783123" [ 1120.698598] env[68233]: _type = "Task" [ 1120.698598] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.701897] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.706210] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1120.706395] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1120.707206] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42f73dbb-60c3-414b-a60e-7892ed5a45d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.715705] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783123, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.717524] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1120.717524] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5281491b-d541-0b23-4010-22e1494f54fb" [ 1120.717524] env[68233]: _type = "Task" [ 1120.717524] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.725994] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5281491b-d541-0b23-4010-22e1494f54fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.822639] env[68233]: DEBUG oslo_vmware.api [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783119, 'name': SuspendVM_Task} progress is 95%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.827622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.827847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.835202] env[68233]: DEBUG nova.compute.manager [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1120.835413] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1120.836797] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb5d805-ef11-47bb-9b5f-02315920c12a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.847221] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1120.847484] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ca50d47-ef61-4eb0-9859-c4212e12f1ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.853951] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1120.853951] env[68233]: value = "task-2783124" [ 1120.853951] env[68233]: _type = "Task" [ 1120.853951] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.863316] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783124, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.952363] env[68233]: DEBUG oslo_concurrency.lockutils [None req-507f649d-2d69-4b0c-a51a-ed44ae41ba4d tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.952363] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1120.955386] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.335s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.955685] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.956021] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1120.960229] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f73c43d-0e51-4fe3-8414-f129696a1517 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.971256] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcb59df-4da7-4b6e-8b81-82b224d2f3c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.979381] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Successfully created port: 4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1120.994463] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f29a549-194a-482b-bc0e-cc26581af029 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.002131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7c9d0a-77fb-402a-8751-554e3df540c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.035695] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180031MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1121.035886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.036095] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s 
{{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.062562] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783121, 'name': ReconfigVM_Task, 'duration_secs': 0.499558} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.062850] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Reconfigured VM instance instance-00000054 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1121.069216] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0c58720-7113-41fc-884b-c0a53e15fbaa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.085127] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1121.085127] env[68233]: value = "task-2783125" [ 1121.085127] env[68233]: _type = "Task" [ 1121.085127] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.095932] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783125, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.116455] env[68233]: INFO nova.compute.manager [-] [instance: ac108b76-385d-40c2-992c-dc7561227130] Took 1.03 seconds to deallocate network for instance. [ 1121.202624] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.212642] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783123, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.235053] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5281491b-d541-0b23-4010-22e1494f54fb, 'name': SearchDatastore_Task, 'duration_secs': 0.022092} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.236351] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf17db44-f2f2-47ad-8f7b-fd1aa2fc564d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.245528] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1121.245528] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dc1ee-8165-8504-0f4c-d192f12b0c8d" [ 1121.245528] env[68233]: _type = "Task" [ 1121.245528] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.258474] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dc1ee-8165-8504-0f4c-d192f12b0c8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.322540] env[68233]: DEBUG oslo_vmware.api [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783119, 'name': SuspendVM_Task, 'duration_secs': 1.25419} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.322866] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Suspended the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1121.323071] env[68233]: DEBUG nova.compute.manager [None req-993c1367-1b98-4fa1-bfc4-a16c671b5914 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.323928] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b24e38-c97c-4ee0-b453-f85bed1905cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.329566] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1121.365078] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783124, 'name': PowerOffVM_Task, 'duration_secs': 0.398167} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.365538] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1121.365778] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1121.366088] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f77c406c-bec2-4f8d-8181-2e4d356b6fca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.558656] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1121.558932] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1121.559143] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] 7025be4e-b800-42c8-a2c0-3ea059d3b929 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1121.560048] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a6a1825-7a1a-40ef-90eb-78c8bd495127 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.567703] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1121.567703] env[68233]: value = "task-2783127" [ 1121.567703] env[68233]: _type = "Task" [ 1121.567703] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.576075] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783127, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.594674] env[68233]: DEBUG oslo_vmware.api [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783125, 'name': ReconfigVM_Task, 'duration_secs': 0.235123} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.594674] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559490', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'name': 'volume-134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '62cd066c-5eac-4f07-bf4e-9275fedc7384', 'attached_at': '', 'detached_at': '', 'volume_id': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91', 'serial': '134bd8db-5ff7-4467-9f8a-c0e2aa619b91'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1121.626449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.698686] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783122, 'name': ReconfigVM_Task, 'duration_secs': 0.812847} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.699095] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.699664] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dec3ef19-c80c-48b1-a4a0-0469505ef38c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.707330] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1121.707330] env[68233]: value = "task-2783128" [ 1121.707330] env[68233]: _type = "Task" [ 1121.707330] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.714131] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.894244} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.714770] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1121.715015] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1121.715428] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f2ccb87-6c1b-42b3-9eeb-928587a720af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.720283] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783128, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.724098] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1121.724098] env[68233]: value = "task-2783129" [ 1121.724098] env[68233]: _type = "Task" [ 1121.724098] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.732366] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.761248] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dc1ee-8165-8504-0f4c-d192f12b0c8d, 'name': SearchDatastore_Task, 'duration_secs': 0.079339} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.761539] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1121.761805] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9/03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1121.762174] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2d1a124-06d6-42d8-b748-8241a9eea2f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.770418] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1121.770418] env[68233]: value = "task-2783130" [ 1121.770418] env[68233]: _type = "Task" [ 1121.770418] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.780596] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.856962] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.963310] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1121.989641] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1121.990104] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1121.990317] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1121.990655] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1121.990946] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1121.991159] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1121.991387] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1121.991611] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1121.991756] env[68233]: DEBUG nova.virt.hardware [None 
req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1121.991922] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1121.992106] env[68233]: DEBUG nova.virt.hardware [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1121.992952] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d97ccf7-feda-44e3-a715-88f61aabe05e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.001521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622bee4b-0c63-4b62-bac2-f479e3f6c8e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.064728] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.064911] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 35587446-6f3b-465b-a2a6-0b154374734c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065067] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065194] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c2d04b37-3eae-46cb-a227-b62d36c62a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065313] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 7025be4e-b800-42c8-a2c0-3ea059d3b929 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065429] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f53dccfc-9d0d-4eea-b94c-8527f707c5c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065544] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance ac108b76-385d-40c2-992c-dc7561227130 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065657] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 3af7ccd5-f36b-4596-baf6-ed890e89d6a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065771] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 0d79ccd0-d24d-4200-9d34-f3a7f44370aa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.065953] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.066101] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 550a52f6-e11b-4d34-ad82-9b39d33780d7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.066219] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9f862347-508b-4c8a-a338-97972b0c0b0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1122.077256] env[68233]: DEBUG oslo_vmware.api [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302792} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.077256] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1122.077256] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1122.077256] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1122.077256] env[68233]: INFO nova.compute.manager [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1122.077256] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1122.077665] env[68233]: DEBUG nova.compute.manager [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1122.077665] env[68233]: DEBUG nova.network.neutron [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1122.138851] env[68233]: DEBUG nova.objects.instance [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'flavor' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.218438] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783128, 'name': Rename_Task, 'duration_secs': 0.176869} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.218614] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.219234] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ce3517b7-5c7b-4648-ae41-02f5c1a8e702 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.225255] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1122.225255] env[68233]: value = "task-2783131" [ 1122.225255] env[68233]: _type = "Task" [ 1122.225255] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.238519] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.242273] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126755} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.242633] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1122.243704] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc991e5-33a3-41a3-bbc0-52a40b7c7cbc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.256914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.257353] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.257651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.257888] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.258159] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.269825] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 
1122.270670] env[68233]: INFO nova.compute.manager [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Terminating instance [ 1122.272808] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eaf0ebfc-2bc3-4770-b7c8-35bcfd68bdef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.306544] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783130, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.309563] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1122.309563] env[68233]: value = "task-2783132" [ 1122.309563] env[68233]: _type = "Task" [ 1122.309563] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.319534] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.395870] env[68233]: DEBUG nova.compute.manager [req-85d3b0e1-bbcb-4385-b8d3-d790a71ce057 req-66b26739-828d-40d7-8a82-065de67c6294 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Received event network-vif-deleted-4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1122.395971] env[68233]: INFO nova.compute.manager [req-85d3b0e1-bbcb-4385-b8d3-d790a71ce057 req-66b26739-828d-40d7-8a82-065de67c6294 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Neutron deleted interface 4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23; detaching it from the instance and deleting it from the info cache [ 1122.396646] env[68233]: DEBUG nova.network.neutron [req-85d3b0e1-bbcb-4385-b8d3-d790a71ce057 req-66b26739-828d-40d7-8a82-065de67c6294 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.473987] env[68233]: DEBUG nova.compute.manager [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Received event network-vif-plugged-4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1122.474260] env[68233]: DEBUG oslo_concurrency.lockutils [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.474537] env[68233]: DEBUG 
oslo_concurrency.lockutils [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.474738] env[68233]: DEBUG oslo_concurrency.lockutils [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.474968] env[68233]: DEBUG nova.compute.manager [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] No waiting events found dispatching network-vif-plugged-4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1122.475259] env[68233]: WARNING nova.compute.manager [req-f62539cf-87ef-4080-bc01-22428940fd67 req-3cacb84f-e632-41d9-ace4-eafc64b9a75e service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Received unexpected event network-vif-plugged-4ae388e9-417d-4206-9e31-b91986ba0652 for instance with vm_state building and task_state spawning. [ 1122.570520] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Successfully updated port: 4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1122.572197] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 5038002c-884f-4f75-a1fe-aa84220c9ea6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1122.572431] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1122.572881] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1122.733345] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54c76fc-dcca-478e-939f-860aedac247d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.741735] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783131, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.744652] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e82851-bc55-48de-b98a-9ec13f78cc52 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.775217] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e94e58-aa47-4c63-ac0a-acf12bb57f66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.783978] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75148efe-b814-4130-ba41-41dec2941c88 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.800737] env[68233]: DEBUG nova.compute.manager [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1122.800943] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1122.801397] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1122.803315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829730bc-efaa-4083-b12e-d83d26c5c145 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.808550] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.790474} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.809515] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9/03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1122.809728] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1122.809984] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88fa357d-f2a1-4bb0-9c77-ce3bf76a5569 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.816224] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1122.816705] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b2ac799-9a30-4896-b7c9-3e38b0b211d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.822220] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783132, 'name': ReconfigVM_Task, 'duration_secs': 0.495638} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.823509] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1/3af7ccd5-f36b-4596-baf6-ed890e89d6a1.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1122.824131] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1122.824131] env[68233]: value = "task-2783133" [ 1122.824131] env[68233]: _type = "Task" [ 1122.824131] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.824312] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9042ce04-e724-4e50-bac2-d4bd7d1dcb6a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.834589] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783133, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.835660] env[68233]: DEBUG nova.network.neutron [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1122.836964] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1122.836964] env[68233]: value = "task-2783135" [ 1122.836964] env[68233]: _type = "Task" [ 1122.836964] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.845369] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783135, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.896320] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1122.896552] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1122.896732] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] 0d79ccd0-d24d-4200-9d34-f3a7f44370aa {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1122.897011] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7844226-20da-4921-ab6f-f9d28a1efba7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.899322] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45390b05-2249-4b96-a96f-7ea47c68668a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.907300] env[68233]: DEBUG oslo_vmware.api [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1122.907300] env[68233]: value = "task-2783136" [ 1122.907300] env[68233]: _type = "Task" [ 1122.907300] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.910493] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261d7379-4961-43e7-a0e5-58549fab1c70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.931101] env[68233]: DEBUG oslo_vmware.api [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.945341] env[68233]: DEBUG nova.compute.manager [req-85d3b0e1-bbcb-4385-b8d3-d790a71ce057 req-66b26739-828d-40d7-8a82-065de67c6294 service nova] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Detach interface failed, port_id=4f0efdc2-cfca-4fe8-ac6f-b3605ba94d23, reason: Instance 7025be4e-b800-42c8-a2c0-3ea059d3b929 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1123.077406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1123.077406] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1123.077406] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1123.145578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c0ea176a-3b5c-440a-828b-f10c7df6fa29 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.235s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.238097] env[68233]: DEBUG oslo_vmware.api [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783131, 'name': PowerOnVM_Task, 'duration_secs': 0.546347} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.238358] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1123.238566] env[68233]: INFO nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Took 5.81 seconds to spawn the instance on the hypervisor. 
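The PowerOnVM_Task entries above (progress 0% -> 66% -> "completed successfully" with a duration_secs value) come from the driver polling the vCenter task until it finishes. Below is a minimal sketch of that polling loop; get_task_info is a hypothetical callable standing in for the real vSphere TaskInfo lookup, so this illustrates the pattern rather than oslo.vmware's implementation.

import time

def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    # get_task_info(task_id) is assumed to return a dict such as
    # {'state': 'running', 'progress': 66} or {'state': 'success'};
    # it stands in for the real vSphere API call.
    start = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            info['duration_secs'] = round(time.monotonic() - start, 6)
            return info  # corresponds to the "completed successfully" entries above
        if info['state'] == 'error':
            raise RuntimeError('task %s failed: %s' % (task_id, info.get('error')))
        # still queued or running: report progress and poll again
        print('Task %s progress is %d%%.' % (task_id, info.get('progress', 0)))
        time.sleep(poll_interval)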
[ 1123.238745] env[68233]: DEBUG nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1123.239525] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b3c96-bfc6-4509-90f6-2a72667e2b77 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.306864] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1123.335541] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783133, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161047} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.335811] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1123.336559] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0080f4-6668-4798-b46a-7914a0058c24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.339155] env[68233]: INFO nova.compute.manager [-] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Took 1.26 seconds to deallocate network for instance. 
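The inventory dict reported for provider 51aa13e7-0977-4031-b209-4ae90c83752c determines how much of each resource class placement will hand out: usable capacity is (total - reserved) * allocation_ratio, and any single allocation must also respect min_unit, max_unit and step_size. A small sketch of that arithmetic using the values from this entry:

def placement_capacity(inv):
    # usable capacity per resource class: (total - reserved) * allocation_ratio
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
print(placement_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}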
[ 1123.365592] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9/03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1123.369403] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8051d71c-3521-47f3-8537-06d14f6fe289 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.384064] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783135, 'name': Rename_Task, 'duration_secs': 0.150985} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.385407] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.385753] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49c5554c-4ae8-4e0f-a434-8fbda4442a13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.390849] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1123.390849] env[68233]: value = "task-2783137" [ 1123.390849] env[68233]: _type = "Task" [ 1123.390849] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.394735] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1123.394735] env[68233]: value = "task-2783138" [ 1123.394735] env[68233]: _type = "Task" [ 1123.394735] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.402930] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783137, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.406095] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783138, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.417504] env[68233]: DEBUG oslo_vmware.api [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195882} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.417784] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.417983] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.418175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.418354] env[68233]: INFO nova.compute.manager [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1123.418636] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1123.418775] env[68233]: DEBUG nova.compute.manager [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1123.418869] env[68233]: DEBUG nova.network.neutron [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1123.612120] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1123.758684] env[68233]: DEBUG nova.network.neutron [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1123.760990] env[68233]: INFO nova.compute.manager [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Took 12.38 seconds to build instance. 
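The instance_info_cache payload above is a list of VIF dicts; the fixed address 192.168.128.7 for port 4ae388e9-417d-4206-9e31-b91986ba0652 sits under network -> subnets -> ips. The helper below shows one way to walk such a structure; the function name and the trimmed sample data are illustrative, not Nova code.

def summarize_vifs(network_info):
    # Walk a network_info-style list and collect (port_id, mac, fixed_ips).
    out = []
    for vif in network_info:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        out.append((vif['id'], vif['address'], ips))
    return out

sample = [{'id': '4ae388e9-417d-4206-9e31-b91986ba0652',
           'address': 'fa:16:3e:88:ff:16',
           'network': {'subnets': [{'ips': [{'address': '192.168.128.7'}]}]}}]
print(summarize_vifs(sample))
# [('4ae388e9-417d-4206-9e31-b91986ba0652', 'fa:16:3e:88:ff:16', ['192.168.128.7'])]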
[ 1123.811596] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1123.811856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.776s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.812146] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.186s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.812389] env[68233]: DEBUG nova.objects.instance [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lazy-loading 'resources' on Instance uuid ac108b76-385d-40c2-992c-dc7561227130 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1123.813573] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1123.814376] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances with incomplete migration {{(pid=68233) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1123.848403] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.902350] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783137, 'name': ReconfigVM_Task, 'duration_secs': 0.388824} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.902978] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9/03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1123.903659] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26fcb9bf-db2e-4054-b365-650da2a48de4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.908243] env[68233]: DEBUG oslo_vmware.api [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783138, 'name': PowerOnVM_Task, 'duration_secs': 0.468817} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.908811] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1123.909048] env[68233]: DEBUG nova.compute.manager [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1123.909801] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56461f8e-0721-4710-9e05-8c29a37493b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.913390] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1123.913390] env[68233]: value = "task-2783139" [ 1123.913390] env[68233]: _type = "Task" [ 1123.913390] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.926199] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783139, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.262890] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1124.263250] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Instance network_info: |[{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1124.263780] env[68233]: DEBUG oslo_concurrency.lockutils [None req-450b4f75-d335-46c2-9582-0b5602a1bd9a tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.889s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.264049] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:ff:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ae388e9-417d-4206-9e31-b91986ba0652', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1124.271549] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.271768] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1124.271996] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed15f018-1a61-4a08-981b-9749c778e1f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.292523] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1124.292523] env[68233]: value = "task-2783140" [ 1124.292523] env[68233]: _type = "Task" [ 1124.292523] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.300048] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783140, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.321309] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1124.322410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.323090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.323090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.323090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.323090] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock 
"62cd066c-5eac-4f07-bf4e-9275fedc7384-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.324818] env[68233]: INFO nova.compute.manager [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Terminating instance [ 1124.386878] env[68233]: DEBUG nova.network.neutron [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1124.429978] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783139, 'name': Rename_Task, 'duration_secs': 0.355041} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.433567] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1124.434058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6c713a3-d745-413c-b7ce-66cc7525b8ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.436017] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.442376] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1124.442376] env[68233]: value = "task-2783141" [ 1124.442376] env[68233]: _type = "Task" [ 1124.442376] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.450978] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.506711] env[68233]: DEBUG nova.compute.manager [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Received event network-changed-4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1124.506910] env[68233]: DEBUG nova.compute.manager [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Refreshing instance network info cache due to event network-changed-4ae388e9-417d-4206-9e31-b91986ba0652. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1124.507202] env[68233]: DEBUG oslo_concurrency.lockutils [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1124.507371] env[68233]: DEBUG oslo_concurrency.lockutils [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1124.507554] env[68233]: DEBUG nova.network.neutron [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Refreshing network info cache for port 4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1124.522772] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d43818-bfe0-43df-b1cb-655d67500e15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.530343] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f47f1d-dbe5-4675-b467-a558ccd69d4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.561825] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de276e2-599a-4255-8d19-a9088182fdcd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.569110] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5510431f-702e-4265-ab6e-6202ff22c9f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.586213] env[68233]: DEBUG nova.compute.provider_tree [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1124.804038] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783140, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.828382] env[68233]: DEBUG nova.compute.manager [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1124.828645] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1124.829469] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21596d6f-24a2-4699-a6cf-29dd289976a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.837016] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1124.837316] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a603c688-d417-4c4c-99bf-1aae68e658bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.843128] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1124.843128] env[68233]: value = "task-2783142" [ 1124.843128] env[68233]: _type = "Task" [ 1124.843128] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.850310] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.892435] env[68233]: INFO nova.compute.manager [-] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Took 1.47 seconds to deallocate network for instance. 
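The lockutils lines through this stretch ("acquired ... waited 2.186s", "released ... held 2.776s") record how long a caller queued for a named lock and how long it then held it. The context manager below reproduces that waited/held accounting with a plain threading.Lock; it is only a sketch of the semantics, not the oslo.concurrency implementation.

import time
from contextlib import contextmanager
from threading import Lock

@contextmanager
def timed_lock(lock, name):
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0            # time spent queued behind other holders
    print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited  # time the critical section ran
        lock.release()
        print('Lock "%s" released :: held %.3fs' % (name, held))

# usage:
compute_resources = Lock()
with timed_lock(compute_resources, 'compute_resources'):
    pass  # update resource usage here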
[ 1124.908232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.908449] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.908634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.908836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.909045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.910970] env[68233]: INFO nova.compute.manager [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Terminating instance [ 1124.953718] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783141, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.077824] env[68233]: INFO nova.compute.manager [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Rebuilding instance [ 1125.088787] env[68233]: DEBUG nova.scheduler.client.report [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1125.119823] env[68233]: DEBUG nova.compute.manager [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.120700] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc43231-7367-4076-8e76-e21aec5c24af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.251829] env[68233]: DEBUG nova.network.neutron [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updated VIF entry in instance network info cache for port 4ae388e9-417d-4206-9e31-b91986ba0652. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1125.252342] env[68233]: DEBUG nova.network.neutron [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.304153] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783140, 'name': CreateVM_Task, 'duration_secs': 0.559675} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.304353] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1125.305115] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.305293] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.305643] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1125.305918] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea1618e9-acbc-4136-aafc-d36e5bb094e5 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.310682] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1125.310682] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528bcc4b-3f7f-7e6c-52b1-7c871ddead17" [ 1125.310682] env[68233]: _type = "Task" [ 1125.310682] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.319051] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528bcc4b-3f7f-7e6c-52b1-7c871ddead17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.352345] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783142, 'name': PowerOffVM_Task, 'duration_secs': 0.183175} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.352629] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.352803] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.353086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-232a6046-2f47-4899-a326-f3879f52d64e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.399533] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.414607] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "refresh_cache-3af7ccd5-f36b-4596-baf6-ed890e89d6a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.414868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquired lock "refresh_cache-3af7ccd5-f36b-4596-baf6-ed890e89d6a1" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.415043] env[68233]: DEBUG nova.network.neutron [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1125.419223] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.419458] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.419651] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.419924] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01695e07-218c-4cca-8d9b-ac0f03a6bb0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.426766] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1125.426766] env[68233]: value = "task-2783144" [ 1125.426766] env[68233]: _type = "Task" [ 1125.426766] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.434840] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.451657] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783141, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.594490] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.782s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.597100] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.740s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.598613] env[68233]: INFO nova.compute.claims [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1125.616426] env[68233]: INFO nova.scheduler.client.report [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleted allocations for instance ac108b76-385d-40c2-992c-dc7561227130 [ 1125.755531] env[68233]: DEBUG oslo_concurrency.lockutils [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.755800] env[68233]: DEBUG nova.compute.manager [req-6601445a-60e2-49a9-aecc-d7003940d3c5 req-53776e65-620d-4aa7-910d-b19ca71bb548 service nova] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Received event network-vif-deleted-f23451b7-3ec0-4c70-93bd-a52f260a2dc1 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1125.821410] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528bcc4b-3f7f-7e6c-52b1-7c871ddead17, 'name': SearchDatastore_Task, 'duration_secs': 0.012117} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.821756] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.822009] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1125.822254] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.822413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.822625] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1125.822897] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84a056d5-6885-4792-8cf1-d1a8b777401a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.830397] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1125.830577] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1125.831307] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ecc439e-1fbd-4c26-9ff9-78b15a2acdc7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.837095] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1125.837095] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dfa29-0804-8fb6-9161-4a663247044b" [ 1125.837095] env[68233]: _type = "Task" [ 1125.837095] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.845013] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dfa29-0804-8fb6-9161-4a663247044b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.937552] env[68233]: DEBUG oslo_vmware.api [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293295} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.938464] env[68233]: DEBUG nova.network.neutron [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1125.940510] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.940733] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.940980] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.941193] env[68233]: INFO nova.compute.manager [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Took 1.11 seconds to destroy the instance on the hypervisor. 
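The preceding entries trace the vmwareapi destroy path for instance 62cd066c-5eac-4f07-bf4e-9275fedc7384: power off the VM, unregister it from vCenter, delete its datastore directory, then report the instance destroyed before network deallocation begins. A rough sketch of that sequence against an established oslo_vmware.api.VMwareAPISession, assuming vm_ref, dc_ref and ds_path as placeholder arguments (the real implementation lives in nova.virt.vmwareapi.vmops and ds_util):

    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        vim = session.vim
        # 1. PowerOffVM_Task, awaited like any other vSphere task.
        session.wait_for_task(session.invoke_api(vim, "PowerOffVM_Task", vm_ref))
        # 2. UnregisterVM removes the VM from the vCenter inventory (not a task).
        session.invoke_api(vim, "UnregisterVM", vm_ref)
        # 3. DeleteDatastoreFile_Task deletes the instance directory contents.
        session.wait_for_task(
            session.invoke_api(vim, "DeleteDatastoreFile_Task",
                               vim.service_content.fileManager,
                               name=ds_path, datacenter=dc_ref))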
[ 1125.941527] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.941741] env[68233]: DEBUG nova.compute.manager [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1125.941833] env[68233]: DEBUG nova.network.neutron [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1125.952309] env[68233]: DEBUG oslo_vmware.api [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783141, 'name': PowerOnVM_Task, 'duration_secs': 1.37771} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.952433] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1125.952597] env[68233]: INFO nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Took 11.70 seconds to spawn the instance on the hypervisor. 
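The "Waiting for function ... _deallocate_network_with_retries to return" line above reflects the oslo.service looping-call pattern Nova uses to retry network deallocation until it succeeds. A minimal sketch of that general pattern, using FixedIntervalLoopingCall purely for illustration (the manager's actual retry and backoff policy differs):

    from oslo_service import loopingcall

    def deallocate_with_retries(do_deallocate, interval=2.0):
        def _try():
            try:
                do_deallocate()
            except Exception:
                return  # retry on the next interval (real code is far narrower)
            raise loopingcall.LoopingCallDone()  # success: stop the loop

        # start(...).wait() blocks the caller until LoopingCallDone is raised.
        loopingcall.FixedIntervalLoopingCall(_try).start(interval=interval).wait()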
[ 1125.952849] env[68233]: DEBUG nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1125.954449] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29dd1aaf-bc58-452d-aaf4-37f4fac45fee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.000500] env[68233]: DEBUG nova.network.neutron [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.124064] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9eed270d-dadc-47d3-a0ba-0bbb7569aa52 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "ac108b76-385d-40c2-992c-dc7561227130" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.721s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.133996] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.135806] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9fb1ade4-732f-4905-b557-ac745833ea19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.143957] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1126.143957] env[68233]: value = "task-2783145" [ 1126.143957] env[68233]: _type = "Task" [ 1126.143957] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.153221] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783145, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.347340] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526dfa29-0804-8fb6-9161-4a663247044b, 'name': SearchDatastore_Task, 'duration_secs': 0.027651} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.348204] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69716a02-189a-4118-af42-e695e47d73ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.353611] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1126.353611] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e90c58-9f1e-bb07-84eb-0a8dc28ccba9" [ 1126.353611] env[68233]: _type = "Task" [ 1126.353611] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.362609] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.362838] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.363050] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.363255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.363400] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.365071] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e90c58-9f1e-bb07-84eb-0a8dc28ccba9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.365479] env[68233]: INFO nova.compute.manager [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Terminating instance [ 1126.472051] env[68233]: DEBUG nova.compute.manager [req-8b28cfa3-10f5-4e34-b8d3-08cac418b0af req-f349c234-ba8f-4f63-99d4-6abf01c74b55 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Received event network-vif-deleted-be512a20-e94b-4c51-8658-24c6e1feba94 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1126.472277] env[68233]: INFO nova.compute.manager [req-8b28cfa3-10f5-4e34-b8d3-08cac418b0af req-f349c234-ba8f-4f63-99d4-6abf01c74b55 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Neutron deleted interface be512a20-e94b-4c51-8658-24c6e1feba94; detaching it from the instance and deleting it from the info cache [ 1126.473029] env[68233]: DEBUG nova.network.neutron [req-8b28cfa3-10f5-4e34-b8d3-08cac418b0af req-f349c234-ba8f-4f63-99d4-6abf01c74b55 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.476132] env[68233]: INFO nova.compute.manager [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Took 31.15 seconds to build instance. [ 1126.504550] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Releasing lock "refresh_cache-3af7ccd5-f36b-4596-baf6-ed890e89d6a1" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.504983] env[68233]: DEBUG nova.compute.manager [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1126.505217] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.506879] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3274a0f9-545e-4c32-a56f-4bdf443fc09d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.516473] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.516674] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d8bf4b0-3dd2-4998-8275-45546abf0a19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.523633] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1126.523633] env[68233]: value = "task-2783146" [ 1126.523633] env[68233]: _type = "Task" [ 1126.523633] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.532668] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.658459] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783145, 'name': PowerOffVM_Task, 'duration_secs': 0.218213} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.658776] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1126.659424] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.661180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad372d6-6418-494e-9167-12ce9668d6a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.678874] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1126.682181] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6ea435f2-3a25-49ed-a4e5-06a9a4636763 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.713508] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1126.713808] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1126.714155] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Deleting the datastore file [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1126.714889] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e4ade64-502d-4adc-9773-b5627359bd74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.721874] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1126.721874] env[68233]: value = "task-2783148" [ 1126.721874] env[68233]: _type = "Task" [ 1126.721874] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.734699] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.830142] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af167887-a103-4217-afc7-66a97a552cbe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.836793] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce14313f-5926-429f-9fbf-2528a5638a7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.871296] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d0d9a7-b1d0-43e1-86cb-ff5e7b6a9097 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.874066] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "refresh_cache-f53dccfc-9d0d-4eea-b94c-8527f707c5c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1126.874250] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquired lock "refresh_cache-f53dccfc-9d0d-4eea-b94c-8527f707c5c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1126.874427] env[68233]: DEBUG nova.network.neutron [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1126.883638] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52e90c58-9f1e-bb07-84eb-0a8dc28ccba9, 'name': SearchDatastore_Task, 'duration_secs': 0.013226} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.885373] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f5d12e-b058-4a92-bf1b-d4461d19ce2c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.889059] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.889311] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1126.889801] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9cdaa2b4-e879-4b9b-8a3a-f36d048115ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.903783] env[68233]: DEBUG nova.compute.provider_tree [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1126.906849] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1126.906849] env[68233]: value = "task-2783149" [ 1126.906849] env[68233]: _type = "Task" [ 1126.906849] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.909564] env[68233]: DEBUG nova.network.neutron [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.915636] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783149, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.977092] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bae2d0a-cc7d-434a-a350-268fc809e749 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.981140] env[68233]: DEBUG oslo_concurrency.lockutils [None req-86475a7a-9a54-4a55-828e-ee999654d26f tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.667s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.988561] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e84cdb42-5211-44c0-b7a3-0025d46249e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.019486] env[68233]: DEBUG nova.compute.manager [req-8b28cfa3-10f5-4e34-b8d3-08cac418b0af req-f349c234-ba8f-4f63-99d4-6abf01c74b55 service nova] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Detach interface failed, port_id=be512a20-e94b-4c51-8658-24c6e1feba94, reason: Instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1127.032190] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783146, 'name': PowerOffVM_Task, 'duration_secs': 0.286773} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.032468] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.032635] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1127.032886] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d433db51-2081-4ab0-bb03-c2894cc24f42 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.058837] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1127.059072] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.059260] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Deleting the datastore file [datastore2] 3af7ccd5-f36b-4596-baf6-ed890e89d6a1 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.059518] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d561d924-4cb1-4b03-9923-21b706dd74b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.067296] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for the task: (returnval){ [ 1127.067296] env[68233]: value = "task-2783151" [ 1127.067296] env[68233]: _type = "Task" [ 1127.067296] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.074493] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783151, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.175263] env[68233]: DEBUG oslo_concurrency.lockutils [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.175539] env[68233]: DEBUG oslo_concurrency.lockutils [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.234964] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199474} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.234964] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.234964] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1127.234964] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1127.399418] env[68233]: DEBUG nova.network.neutron [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.407910] env[68233]: DEBUG nova.scheduler.client.report [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1127.411833] env[68233]: INFO nova.compute.manager [-] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Took 1.47 seconds to deallocate network for instance. [ 1127.428310] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783149, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.463701] env[68233]: DEBUG nova.network.neutron [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.576922] env[68233]: DEBUG oslo_vmware.api [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Task: {'id': task-2783151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160329} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.577207] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1127.577394] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1127.577575] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1127.577759] env[68233]: INFO nova.compute.manager [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Took 1.07 seconds to destroy the instance on the hypervisor. 
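Throughout this section the compute manager serializes work with oslo.concurrency locks: a per-instance lock around do_terminate_instance, an "<instance-uuid>-events" lock while clearing pending external events, "refresh_cache-<uuid>" around network-info refreshes, and "compute_resources" inside the resource tracker. The "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" lines are emitted by the lockutils wrappers. The two usual forms, sketched with placeholder names:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Decorator form: the wrapper logs acquire/wait/hold times around the call.
        pass

    def refresh_network_cache(instance_uuid):
        # Context-manager form, mirroring the "refresh_cache-<uuid>" locks above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # refresh and persist the instance's network info cache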
[ 1127.578009] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1127.578209] env[68233]: DEBUG nova.compute.manager [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1127.578306] env[68233]: DEBUG nova.network.neutron [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1127.594016] env[68233]: DEBUG nova.network.neutron [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1127.681524] env[68233]: INFO nova.compute.manager [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Detaching volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 [ 1127.717827] env[68233]: INFO nova.virt.block_device [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Attempting to driver detach volume 81086a97-e05a-4835-bd9a-78b8c85c5a53 from mountpoint /dev/sdb [ 1127.718080] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1127.718274] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1127.719138] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add02778-80b5-4e27-ba2c-888948d68fd0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.743936] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61c8990-8e05-47c4-8893-ff81d320a2dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.750531] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f0407e-65c1-439c-8398-692b41d5669e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.770343] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228f0270-56d2-4921-ad1b-0a29e2f7d8bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.784897] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] The volume has not been displaced from its original location: [datastore2] volume-81086a97-e05a-4835-bd9a-78b8c85c5a53/volume-81086a97-e05a-4835-bd9a-78b8c85c5a53.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1127.790021] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfiguring VM instance instance-0000005f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1127.790488] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8f6151c-d4c3-4b99-bd4e-cca17d8daf70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.808073] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1127.808073] env[68233]: value = "task-2783152" [ 1127.808073] env[68233]: _type = "Task" [ 1127.808073] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.815236] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783152, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.921100] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1127.921638] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1127.924952] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783149, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612698} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.926709] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.078s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.926996] env[68233]: DEBUG nova.objects.instance [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'resources' on Instance uuid 7025be4e-b800-42c8-a2c0-3ea059d3b929 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.928788] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.929158] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1127.929389] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1127.932305] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a8c0ffe-dcbe-4033-acc8-e8f45fd45f82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.934955] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.935181] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.941528] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e 
tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1127.941528] env[68233]: value = "task-2783153" [ 1127.941528] env[68233]: _type = "Task" [ 1127.941528] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.951260] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783153, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.965793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Releasing lock "refresh_cache-f53dccfc-9d0d-4eea-b94c-8527f707c5c2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1127.966328] env[68233]: DEBUG nova.compute.manager [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1127.966554] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1127.968046] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64fe27d-fe18-4f99-adaf-c296debb85d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.978552] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.978806] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-65e4037f-3bc0-4b53-8a1f-fa4ead3aeaae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.984455] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1127.984455] env[68233]: value = "task-2783154" [ 1127.984455] env[68233]: _type = "Task" [ 1127.984455] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.993239] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783154, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.096968] env[68233]: DEBUG nova.network.neutron [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.276322] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1128.276588] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1128.276752] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1128.276933] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1128.277118] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1128.277282] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1128.277494] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1128.277667] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d 
tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1128.277840] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1128.278013] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1128.278223] env[68233]: DEBUG nova.virt.hardware [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1128.279125] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce31db5-4f69-49e8-aefa-b0a2bf59bebb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.288058] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8563d446-60ca-41c3-8ff3-96c51335a2eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.301980] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance VIF info [] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1128.307678] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.308508] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1128.308508] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e81a1ed-2fad-4f79-8c8d-e58815eafe20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.329254] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783152, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.330470] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1128.330470] env[68233]: value = "task-2783155" [ 1128.330470] env[68233]: _type = "Task" [ 1128.330470] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.337878] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783155, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.426852] env[68233]: DEBUG nova.compute.utils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1128.428389] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1128.428562] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1128.438091] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1128.450665] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783153, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066473} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.450925] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1128.451979] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971847df-4c05-4d37-b10b-7f02d978f6cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.477959] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1128.480770] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e00c55d6-6206-479c-8db8-e9db10bbd46d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.498046] env[68233]: DEBUG nova.policy [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65225f2affe34ceda9a265989bddfc9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74a353ea173c4b8bb74b84032d4e12b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1128.500667] env[68233]: DEBUG nova.compute.manager [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1128.500879] env[68233]: DEBUG nova.compute.manager [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1128.501110] env[68233]: DEBUG oslo_concurrency.lockutils [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1128.501258] env[68233]: DEBUG oslo_concurrency.lockutils [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1128.501418] env[68233]: DEBUG nova.network.neutron [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1128.511658] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1128.511658] env[68233]: value = "task-2783156" [ 1128.511658] env[68233]: _type = "Task" [ 1128.511658] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.520676] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783154, 'name': PowerOffVM_Task, 'duration_secs': 0.302456} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.521322] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1128.521547] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1128.521812] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63503dcf-5e15-46bf-8016-6368a08eabeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.529750] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783156, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.552272] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1128.552613] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1128.552827] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleting the datastore file [datastore2] f53dccfc-9d0d-4eea-b94c-8527f707c5c2 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1128.555797] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6491a8c-93ed-4b37-b3ea-aa11f762d330 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.561821] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for the task: (returnval){ [ 1128.561821] env[68233]: value = "task-2783158" [ 1128.561821] env[68233]: _type = "Task" [ 1128.561821] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.570256] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.599054] env[68233]: INFO nova.compute.manager [-] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Took 1.02 seconds to deallocate network for instance. 
[ 1128.651623] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28413ee1-ca0f-4074-8a4c-4eecc8303b05 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.658782] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17975896-2d16-404c-8fa9-95ffb04ab292 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.688515] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0740f898-7dd9-4ee0-b199-f980b6096287 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.695473] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ced57f5-2aa6-423d-a34d-7e8d7899387a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.708827] env[68233]: DEBUG nova.compute.provider_tree [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.783664] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Successfully created port: 922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1128.830849] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783152, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.841018] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783155, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.933492] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1128.961513] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.024251] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783156, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.076180] env[68233]: DEBUG oslo_vmware.api [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Task: {'id': task-2783158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.238749} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.076180] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.076371] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.076592] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.076804] env[68233]: INFO nova.compute.manager [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1129.077102] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1129.077322] env[68233]: DEBUG nova.compute.manager [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1129.077470] env[68233]: DEBUG nova.network.neutron [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1129.105080] env[68233]: DEBUG nova.network.neutron [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1129.107813] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.213048] env[68233]: DEBUG nova.scheduler.client.report [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.277325] env[68233]: DEBUG nova.network.neutron [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updated VIF entry in instance network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1129.277762] env[68233]: DEBUG nova.network.neutron [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.331485] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783152, 'name': ReconfigVM_Task, 'duration_secs': 1.273451} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.334978] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Reconfigured VM instance instance-0000005f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1129.339498] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fedf9fe3-eee7-44ee-a2af-a508c999b742 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.354973] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783155, 'name': CreateVM_Task, 'duration_secs': 0.872524} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.356074] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1129.356407] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1129.356407] env[68233]: value = "task-2783159" [ 1129.356407] env[68233]: _type = "Task" [ 1129.356407] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.356761] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.356918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.357245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1129.357534] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e2b0310-100c-4adb-882c-a20891a1d456 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.365453] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1129.365453] env[68233]: value = 
"session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52422ba0-2835-d227-9dee-453755469674" [ 1129.365453] env[68233]: _type = "Task" [ 1129.365453] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.368790] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783159, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.376791] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52422ba0-2835-d227-9dee-453755469674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.522495] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783156, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.609236] env[68233]: DEBUG nova.network.neutron [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1129.717259] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.790s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.719669] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.284s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.719853] env[68233]: DEBUG nova.objects.instance [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1129.739068] env[68233]: INFO nova.scheduler.client.report [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocations for instance 7025be4e-b800-42c8-a2c0-3ea059d3b929 [ 1129.780405] env[68233]: DEBUG oslo_concurrency.lockutils [req-4a1233c9-0b4a-44bd-b01a-72eafa1949c1 req-c365ad87-7715-4595-9258-2f5ae4e9023a service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.867854] env[68233]: DEBUG oslo_vmware.api [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783159, 'name': ReconfigVM_Task, 'duration_secs': 0.143372} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.868173] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559496', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'name': 'volume-81086a97-e05a-4835-bd9a-78b8c85c5a53', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '35587446-6f3b-465b-a2a6-0b154374734c', 'attached_at': '', 'detached_at': '', 'volume_id': '81086a97-e05a-4835-bd9a-78b8c85c5a53', 'serial': '81086a97-e05a-4835-bd9a-78b8c85c5a53'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1129.880656] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52422ba0-2835-d227-9dee-453755469674, 'name': SearchDatastore_Task, 'duration_secs': 0.0196} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.880982] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1129.881234] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1129.881507] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.881742] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.881991] env[68233]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1129.882237] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c7df675-b1db-40d7-8cca-34be3854cf4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.890973] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1129.891189] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1129.892296] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6829fdb-3576-4f34-a3a1-5ddaac8286c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.897293] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1129.897293] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5226aa69-3d89-805a-b30d-49b06337a64d" [ 1129.897293] env[68233]: _type = "Task" [ 1129.897293] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.904831] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5226aa69-3d89-805a-b30d-49b06337a64d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.943017] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1129.968333] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1129.968586] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1129.968743] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1129.968925] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1129.969087] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1129.969239] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1129.969447] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1129.969604] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1129.969770] 
env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1129.969928] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1129.970120] env[68233]: DEBUG nova.virt.hardware [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1129.970988] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ee4799-3b40-42b2-859c-df22227848f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.978560] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1fd224-c873-4cd1-ba2d-daef52e1fcdd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.021459] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783156, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.111775] env[68233]: INFO nova.compute.manager [-] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Took 1.03 seconds to deallocate network for instance. [ 1130.228631] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Successfully updated port: 922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1130.245385] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6191ddb4-b961-4879-9b42-62afe1fa7fa4 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "7025be4e-b800-42c8-a2c0-3ea059d3b929" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.917s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.407946] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5226aa69-3d89-805a-b30d-49b06337a64d, 'name': SearchDatastore_Task, 'duration_secs': 0.030916} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.409398] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf651aba-2266-400f-9810-1d654198c9fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.412452] env[68233]: DEBUG nova.objects.instance [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'flavor' on Instance uuid 35587446-6f3b-465b-a2a6-0b154374734c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.416891] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1130.416891] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5295ad2f-fbe4-6f36-63cb-029c509749fe" [ 1130.416891] env[68233]: _type = "Task" [ 1130.416891] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.425030] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5295ad2f-fbe4-6f36-63cb-029c509749fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.523798] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783156, 'name': ReconfigVM_Task, 'duration_secs': 1.606278} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.524080] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.524706] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1f0569a4-5a01-45a7-a7bf-9a11bd7280fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.531064] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1130.531064] env[68233]: value = "task-2783160" [ 1130.531064] env[68233]: _type = "Task" [ 1130.531064] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.538819] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783160, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.552872] env[68233]: DEBUG nova.compute.manager [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Received event network-vif-plugged-922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1130.553089] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.553362] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.553487] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.553652] env[68233]: DEBUG nova.compute.manager [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] No waiting events found dispatching network-vif-plugged-922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1130.553824] env[68233]: WARNING nova.compute.manager [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Received unexpected event network-vif-plugged-922f97ce-4e0e-42e9-b56b-1e312580276a for instance with vm_state building and task_state spawning. [ 1130.553981] env[68233]: DEBUG nova.compute.manager [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Received event network-changed-922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1130.554149] env[68233]: DEBUG nova.compute.manager [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Refreshing instance network info cache due to event network-changed-922f97ce-4e0e-42e9-b56b-1e312580276a. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1130.554328] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Acquiring lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.554461] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Acquired lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.554640] env[68233]: DEBUG nova.network.neutron [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Refreshing network info cache for port 922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.619943] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.732042] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fb8a8ac4-d9a2-44ab-ae16-2922e60d36e9 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.733153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.334s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.733741] env[68233]: DEBUG nova.objects.instance [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'resources' on Instance uuid 0d79ccd0-d24d-4200-9d34-f3a7f44370aa {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.734936] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.930132] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5295ad2f-fbe4-6f36-63cb-029c509749fe, 'name': SearchDatastore_Task, 'duration_secs': 0.023324} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.930410] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.930668] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1130.930935] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4dd97ef-381f-4e3b-8e46-aacd1e438550 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.937416] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1130.937416] env[68233]: value = "task-2783161" [ 1130.937416] env[68233]: _type = "Task" [ 1130.937416] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.945195] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783161, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.041329] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783160, 'name': Rename_Task, 'duration_secs': 0.265913} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.041618] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1131.041856] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3be57c0-fd26-4d2d-b797-cee44fe39d99 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.048502] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1131.048502] env[68233]: value = "task-2783162" [ 1131.048502] env[68233]: _type = "Task" [ 1131.048502] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.055813] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.089633] env[68233]: DEBUG nova.network.neutron [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1131.174555] env[68233]: DEBUG nova.network.neutron [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.426575] env[68233]: DEBUG oslo_concurrency.lockutils [None req-244ddfd9-3432-4ff5-b1e1-0dbfaccf5f1f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.251s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.449639] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783161, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501141} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.449919] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1131.450153] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1131.450398] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8cbd8c29-f536-4b8d-8ef2-7e10ca49d5de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.455962] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1131.455962] env[68233]: value = "task-2783163" [ 1131.455962] env[68233]: _type = "Task" [ 1131.455962] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.465998] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783163, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.500152] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8a5253-0054-492c-ad15-5358cdaf2e32 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.507332] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3086f38-f43a-40a1-9788-22bcde9c8c33 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.538367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdbfa6a-686e-49fd-ae95-72b6b1729e36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.545712] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afdd0e8-f58d-4212-8ad1-9286a1b955ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.563325] env[68233]: DEBUG nova.compute.provider_tree [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.568033] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783162, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.677814] env[68233]: DEBUG oslo_concurrency.lockutils [req-6d497b8b-b041-48f6-a2e7-25974490f3b5 req-2731cc0b-d6c9-46f5-99bc-ccdb69309976 service nova] Releasing lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.678239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1131.678406] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1131.967360] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783163, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065011} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.967644] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1131.968446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c115568-38e2-498d-a9a2-ad811c502b9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.988052] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1131.988334] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6480d527-4b3a-4706-83b4-a0cfa7df7710 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.008028] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1132.008028] env[68233]: value = "task-2783164" [ 1132.008028] env[68233]: _type = "Task" [ 1132.008028] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.015329] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783164, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.060601] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783162, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.070067] env[68233]: DEBUG nova.scheduler.client.report [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.214988] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.350514] env[68233]: DEBUG nova.network.neutron [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [{"id": "922f97ce-4e0e-42e9-b56b-1e312580276a", "address": "fa:16:3e:a5:03:5b", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap922f97ce-4e", "ovs_interfaceid": "922f97ce-4e0e-42e9-b56b-1e312580276a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.517462] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783164, 'name': ReconfigVM_Task, 'duration_secs': 0.272426} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.517736] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7/550a52f6-e11b-4d34-ad82-9b39d33780d7.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1132.518393] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dbe42d51-3920-476c-b701-893873774232 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.525296] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1132.525296] env[68233]: value = "task-2783165" [ 1132.525296] env[68233]: _type = "Task" [ 1132.525296] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.533637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.533856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.534069] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "35587446-6f3b-465b-a2a6-0b154374734c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.534257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.534444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.540343] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783165, 'name': Rename_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.540789] env[68233]: INFO nova.compute.manager [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Terminating instance [ 1132.560578] env[68233]: DEBUG oslo_vmware.api [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783162, 'name': PowerOnVM_Task, 'duration_secs': 1.308858} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.560824] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1132.561040] env[68233]: INFO nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Took 10.60 seconds to spawn the instance on the hypervisor. 
[ 1132.561227] env[68233]: DEBUG nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1132.561960] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83252ff8-e162-46b7-84e3-264ffba26a3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.574687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.576757] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.648s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.576977] env[68233]: DEBUG nova.objects.instance [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'resources' on Instance uuid 62cd066c-5eac-4f07-bf4e-9275fedc7384 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.598629] env[68233]: INFO nova.scheduler.client.report [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance 0d79ccd0-d24d-4200-9d34-f3a7f44370aa [ 1132.853649] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.854027] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance network_info: |[{"id": "922f97ce-4e0e-42e9-b56b-1e312580276a", "address": "fa:16:3e:a5:03:5b", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap922f97ce-4e", "ovs_interfaceid": "922f97ce-4e0e-42e9-b56b-1e312580276a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1132.854458] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:03:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '922f97ce-4e0e-42e9-b56b-1e312580276a', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1132.862374] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1132.862593] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1132.862815] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-944c3d60-64a8-466d-92af-47389207b4e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.882808] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1132.882808] env[68233]: value = "task-2783166" [ 1132.882808] env[68233]: _type = "Task" [ 1132.882808] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.890206] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783166, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.034856] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783165, 'name': Rename_Task, 'duration_secs': 0.141346} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.035200] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1133.035448] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96eb2242-07bd-445d-a6be-f4402540ce0e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.041435] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1133.041435] env[68233]: value = "task-2783167" [ 1133.041435] env[68233]: _type = "Task" [ 1133.041435] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.045039] env[68233]: DEBUG nova.compute.manager [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1133.045242] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1133.045946] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690fd26a-296e-406d-acfc-510f9728638b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.053011] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783167, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.055389] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.055617] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05f8771e-afed-4238-a6f9-847e3e3c2169 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.061030] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1133.061030] env[68233]: value = "task-2783168" [ 1133.061030] env[68233]: _type = "Task" [ 1133.061030] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.069064] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783168, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.078601] env[68233]: INFO nova.compute.manager [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Took 18.17 seconds to build instance. [ 1133.106193] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8faf51da-a023-4a3a-b66d-ca6f2e69bb88 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "0d79ccd0-d24d-4200-9d34-f3a7f44370aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.849s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.243204] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3337f85-1810-4fac-8c5a-645bcb817173 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.252124] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32028018-8825-4ee0-aa73-da9ce9f515b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.284734] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79934231-1c9d-42ff-868d-c4409c4a432e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.291621] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ab2cce-0ac5-40ae-8326-9d6cec02a9dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.304776] env[68233]: DEBUG nova.compute.provider_tree [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.395032] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783166, 'name': CreateVM_Task, 'duration_secs': 0.340333} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.395213] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.395810] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.395991] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.396355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1133.396647] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-092ec5c4-b87e-4333-9008-4e70827ad075 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.401414] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1133.401414] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f54c4-b652-814e-1e82-f0f8a3f39c2f" [ 1133.401414] env[68233]: _type = "Task" [ 1133.401414] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.410138] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f54c4-b652-814e-1e82-f0f8a3f39c2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.552025] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783167, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.569812] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783168, 'name': PowerOffVM_Task, 'duration_secs': 0.191783} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.570209] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.570473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.570804] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83ff1000-8ef2-4af3-b17e-c3041db9afcb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.583413] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9833265d-04d2-422b-8bec-d990d30b209e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.683s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.645017] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1133.645257] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1133.645441] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] 35587446-6f3b-465b-a2a6-0b154374734c {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1133.645717] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0b87de0-538b-4112-acbb-ad37cfed6e9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.652396] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1133.652396] env[68233]: value = "task-2783170" [ 1133.652396] env[68233]: _type = "Task" [ 1133.652396] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.660636] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783170, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.811022] env[68233]: DEBUG nova.scheduler.client.report [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.918870] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521f54c4-b652-814e-1e82-f0f8a3f39c2f, 'name': SearchDatastore_Task, 'duration_secs': 0.010948} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.919226] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.919468] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1133.919707] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.919857] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.920043] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 
tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1133.920310] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5bed34e-6b6a-4db4-b60f-cab160b9bd10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.937700] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1133.937947] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1133.938613] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0a61d23-65d0-4ff1-a191-c61670636f8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.944377] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1133.944377] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52caa71b-4e69-aa80-244c-f49c4a92a52b" [ 1133.944377] env[68233]: _type = "Task" [ 1133.944377] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.952459] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52caa71b-4e69-aa80-244c-f49c4a92a52b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.037942] env[68233]: DEBUG nova.compute.manager [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Received event network-changed-4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1134.038247] env[68233]: DEBUG nova.compute.manager [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Refreshing instance network info cache due to event network-changed-4ae388e9-417d-4206-9e31-b91986ba0652. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1134.038478] env[68233]: DEBUG oslo_concurrency.lockutils [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.038539] env[68233]: DEBUG oslo_concurrency.lockutils [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.038695] env[68233]: DEBUG nova.network.neutron [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Refreshing network info cache for port 4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1134.053339] env[68233]: DEBUG oslo_vmware.api [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783167, 'name': PowerOnVM_Task, 'duration_secs': 0.665938} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.053619] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1134.053875] env[68233]: DEBUG nova.compute.manager [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1134.054713] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00dcfcd-47c9-4e28-97ea-0cb551313dc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.128725] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.129566] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.168009] env[68233]: DEBUG oslo_vmware.api [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.423034} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.168447] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.168712] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.169044] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.169478] env[68233]: INFO nova.compute.manager [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1134.169701] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1134.169986] env[68233]: DEBUG nova.compute.manager [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1134.170165] env[68233]: DEBUG nova.network.neutron [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.313565] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.737s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.316095] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.355s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.317680] env[68233]: INFO nova.compute.claims [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1134.333274] env[68233]: INFO nova.scheduler.client.report [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted allocations for instance 62cd066c-5eac-4f07-bf4e-9275fedc7384 [ 1134.461365] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52caa71b-4e69-aa80-244c-f49c4a92a52b, 'name': SearchDatastore_Task, 'duration_secs': 0.00893} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.462302] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb4870cf-b8a8-4994-badb-1fa90acc89ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.468045] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1134.468045] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527d9de3-8c4e-48ca-dff2-a0a1e7fb3e28" [ 1134.468045] env[68233]: _type = "Task" [ 1134.468045] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.477326] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527d9de3-8c4e-48ca-dff2-a0a1e7fb3e28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.577110] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.636615] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.636832] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.636993] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.637160] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.637305] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.637451] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.637585] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1134.637730] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1134.691531] env[68233]: DEBUG nova.compute.manager [req-388c8348-2bf7-4b1f-883b-b3dc6f3bc33d req-00175159-d223-4c73-a33c-bc7ae204041f service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Received event network-vif-deleted-e2eef47a-821b-4644-9b1b-6ca932ebe044 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1134.691726] env[68233]: INFO nova.compute.manager [req-388c8348-2bf7-4b1f-883b-b3dc6f3bc33d req-00175159-d223-4c73-a33c-bc7ae204041f service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Neutron deleted interface e2eef47a-821b-4644-9b1b-6ca932ebe044; detaching it from the instance and deleting it from the info cache [ 1134.691894] env[68233]: DEBUG nova.network.neutron [req-388c8348-2bf7-4b1f-883b-b3dc6f3bc33d req-00175159-d223-4c73-a33c-bc7ae204041f service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.768760] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.769029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.796868] env[68233]: DEBUG nova.network.neutron [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updated VIF entry in instance network info cache for port 4ae388e9-417d-4206-9e31-b91986ba0652. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.797265] env[68233]: DEBUG nova.network.neutron [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.841416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9ebcad84-a60b-4a1a-a68f-c42e8c25b448 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "62cd066c-5eac-4f07-bf4e-9275fedc7384" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.519s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.979577] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527d9de3-8c4e-48ca-dff2-a0a1e7fb3e28, 'name': SearchDatastore_Task, 'duration_secs': 0.014096} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.979847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.980125] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.980449] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46edaef9-4507-49d5-9f25-6f9beb193b92 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.988970] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1134.988970] env[68233]: value = "task-2783171" [ 1134.988970] env[68233]: _type = "Task" [ 1134.988970] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.001244] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783171, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.081058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.081438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.081567] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "550a52f6-e11b-4d34-ad82-9b39d33780d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.081766] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.081935] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.084148] env[68233]: INFO nova.compute.manager [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Terminating instance [ 1135.140248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.174597] env[68233]: DEBUG nova.network.neutron [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.195210] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d27241b3-fc51-42db-a2f8-2b9dc244372d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.205231] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f6723c-dd1e-4f0f-ad19-048b19692be5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.235670] env[68233]: DEBUG nova.compute.manager [req-388c8348-2bf7-4b1f-883b-b3dc6f3bc33d req-00175159-d223-4c73-a33c-bc7ae204041f service nova] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Detach interface failed, port_id=e2eef47a-821b-4644-9b1b-6ca932ebe044, reason: Instance 35587446-6f3b-465b-a2a6-0b154374734c could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1135.271230] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1135.300890] env[68233]: DEBUG oslo_concurrency.lockutils [req-79132ad4-af1c-4513-8e54-bfc6f5776975 req-6b81f091-c19f-45c7-8561-b6af1dbb2e0f service nova] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.500736] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783171, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.530718] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54999de1-2782-4a72-b7c2-32842174f82f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.539157] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44fbcfd0-da20-445e-b7a2-fb1a06537f12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.571447] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cb7b92-6ac5-4c91-a55e-adec3f59c968 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.578919] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa8b926-0922-499f-968a-608bd94899e1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.593101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "refresh_cache-550a52f6-e11b-4d34-ad82-9b39d33780d7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.593280] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquired lock "refresh_cache-550a52f6-e11b-4d34-ad82-9b39d33780d7" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.593453] env[68233]: DEBUG nova.network.neutron [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1135.595109] env[68233]: DEBUG nova.compute.provider_tree [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.678061] env[68233]: INFO nova.compute.manager [-] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Took 1.51 seconds to deallocate network for instance. [ 1135.790974] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.003516] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783171, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608616} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.004179] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1136.004179] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.004405] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4042364-a1ad-48e7-ad10-79466b03ad7e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.011408] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1136.011408] env[68233]: value = "task-2783172" [ 1136.011408] env[68233]: _type = "Task" [ 1136.011408] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.018945] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783172, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.100896] env[68233]: DEBUG nova.scheduler.client.report [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1136.125155] env[68233]: DEBUG nova.network.neutron [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1136.184260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.201252] env[68233]: DEBUG nova.network.neutron [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.521042] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783172, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067355} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.521330] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.522163] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0f071c-b1b9-4aa1-a467-0e489713f421 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.527391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.527610] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.546351] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.548016] env[68233]: DEBUG nova.compute.utils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1136.550814] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb19557d-2952-4555-9c49-63a7912de926 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.566409] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 0.039s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.574338] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1136.574338] env[68233]: value = "task-2783173" [ 1136.574338] env[68233]: _type = "Task" [ 1136.574338] env[68233]: } to 
complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.583512] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783173, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.606198] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.606791] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1136.609416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.502s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.609635] env[68233]: DEBUG nova.objects.instance [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lazy-loading 'resources' on Instance uuid 3af7ccd5-f36b-4596-baf6-ed890e89d6a1 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.705894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Releasing lock "refresh_cache-550a52f6-e11b-4d34-ad82-9b39d33780d7" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.706414] env[68233]: DEBUG nova.compute.manager [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1136.706617] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1136.707535] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43575195-298d-4b7d-828a-68f1814933c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.716065] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1136.716349] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de8c6116-3808-4821-a152-e3e3d6f02e75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.722874] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1136.722874] env[68233]: value = "task-2783174" [ 1136.722874] env[68233]: _type = "Task" [ 1136.722874] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.730929] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.002357] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.002479] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.086719] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783173, 'name': ReconfigVM_Task, 'duration_secs': 0.28997} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.087956] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.090409] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-056a20b2-f819-4ebe-afed-e30d36eebb49 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.097196] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1137.097196] env[68233]: value = "task-2783175" [ 1137.097196] env[68233]: _type = "Task" [ 1137.097196] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.106176] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783175, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.114539] env[68233]: DEBUG nova.compute.utils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1137.115774] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1137.115941] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1137.189180] env[68233]: DEBUG nova.policy [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1137.237882] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783174, 'name': PowerOffVM_Task, 'duration_secs': 0.146211} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.238223] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1137.238401] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1137.238691] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3b5aafc-751a-41bd-b77d-7e303eae55d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.266134] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1137.266390] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1137.266675] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Deleting the datastore file 
[datastore2] 550a52f6-e11b-4d34-ad82-9b39d33780d7 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1137.266839] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7bab67a-41cb-402f-bee0-35f20c283e67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.276948] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for the task: (returnval){ [ 1137.276948] env[68233]: value = "task-2783177" [ 1137.276948] env[68233]: _type = "Task" [ 1137.276948] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.284253] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d972626-c568-4a11-b1aa-03502efa44df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.291649] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783177, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.294241] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a72cd6-2ea2-4c5e-b184-3f34a9a6f446 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.324734] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe5f0d-6d76-4b81-99d0-8dac1a37caad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.331838] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58e4aad-56fd-460f-8014-48e0f3a69579 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.345279] env[68233]: DEBUG nova.compute.provider_tree [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.504454] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1137.517564] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Successfully created port: d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.593758] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.594135] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.594402] env[68233]: INFO nova.compute.manager [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Attaching volume ccfad3f4-1e81-405b-8396-09efc21a5cb3 to /dev/sdb [ 1137.610462] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783175, 'name': Rename_Task, 'duration_secs': 0.187014} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.611401] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.611401] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8105dd98-25ff-437e-a237-71120bb42549 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.618167] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1137.618167] env[68233]: value = "task-2783178" [ 1137.618167] env[68233]: _type = "Task" [ 1137.618167] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.620634] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.630623] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.638811] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3022da5-b508-4712-9446-17993128f2cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.646391] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798d0c6c-5aa5-46f2-9627-62996f0b3c8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.660899] env[68233]: DEBUG nova.virt.block_device [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating existing volume attachment record: c36b3fa1-82b4-4d67-9121-eae09fbfc35e {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1137.789271] env[68233]: DEBUG oslo_vmware.api [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Task: {'id': task-2783177, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337151} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.789842] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1137.790054] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1137.790242] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1137.790414] env[68233]: INFO nova.compute.manager [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1137.790652] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1137.790836] env[68233]: DEBUG nova.compute.manager [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1137.790928] env[68233]: DEBUG nova.network.neutron [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1137.806769] env[68233]: DEBUG nova.network.neutron [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1137.848564] env[68233]: DEBUG nova.scheduler.client.report [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1138.029504] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.131538] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783178, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.309989] env[68233]: DEBUG nova.network.neutron [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1138.355425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.358358] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.738s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.358611] env[68233]: DEBUG nova.objects.instance [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lazy-loading 'resources' on Instance uuid f53dccfc-9d0d-4eea-b94c-8527f707c5c2 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1138.376316] env[68233]: INFO nova.scheduler.client.report [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Deleted allocations for instance 3af7ccd5-f36b-4596-baf6-ed890e89d6a1 [ 1138.628532] env[68233]: DEBUG oslo_vmware.api [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783178, 'name': PowerOnVM_Task, 'duration_secs': 0.593551} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.628852] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.629080] env[68233]: INFO nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Took 8.69 seconds to spawn the instance on the hypervisor. 
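The PowerOnVM_Task sequence recorded above (Invoking VirtualMachine.PowerOnVM_Task, then Waiting for the task, then the progress polls, then completed successfully) is oslo.vmware's invoke-then-poll pattern. The following is only an illustrative sketch of that pattern, assuming an already-connected oslo_vmware.api.VMwareAPISession and an already-resolved vm_ref; it is not Nova's actual vm_util code.

    def power_on_vm(session, vm_ref):
        # session: an oslo_vmware.api.VMwareAPISession created at service startup.
        # invoke_api() issues the SOAP call recorded as
        # "Invoking VirtualMachine.PowerOnVM_Task" and returns a task reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task, producing the "progress is 0%/87%"
        # DEBUG lines, and returns once vCenter reports the task complete,
        # raising an oslo_vmware exception if the task fails.
        return session.wait_for_task(task)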
[ 1138.629267] env[68233]: DEBUG nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.630072] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c71840-1073-4c6b-bdb8-66be413e3035 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.633693] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.659972] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1138.660366] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1138.660479] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1138.660628] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1138.660788] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1138.660948] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1138.661181] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1138.661344] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1138.661580] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1138.661892] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1138.662170] env[68233]: DEBUG nova.virt.hardware [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1138.662988] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e202e3-3c27-47fe-9aca-99767812df60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.671230] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4474d68-4a1a-4c12-b3af-3a6dd3276e19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.811397] env[68233]: INFO nova.compute.manager [-] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Took 1.02 seconds to deallocate network for instance. 
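The nova.virt.hardware entries above reduce the flavor and image limits to a single candidate CPU topology for the 1-vCPU m1.nano flavor. A rough sketch of that enumeration step, not nova.virt.hardware itself: every (sockets, cores, threads) factorisation of the vCPU count that fits within the per-dimension limits.

    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        # No dimension can exceed the vCPU count, so the search space stays
        # tiny even when the limits default to 65536 as in the entries above.
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        yield (sockets, cores, threads)

    # 1 vCPU against limits 65536:65536:65536 yields only (1, 1, 1),
    # matching "Got 1 possible topologies" above.
    print(list(possible_cpu_topologies(1, 65536, 65536, 65536)))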
[ 1138.883641] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ac2810f7-185e-4119-8cf3-913d3e95a3d0 tempest-ServerShowV254Test-1291780269 tempest-ServerShowV254Test-1291780269-project-member] Lock "3af7ccd5-f36b-4596-baf6-ed890e89d6a1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.975s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.996497] env[68233]: DEBUG nova.compute.manager [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-vif-plugged-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1138.996792] env[68233]: DEBUG oslo_concurrency.lockutils [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.997064] env[68233]: DEBUG oslo_concurrency.lockutils [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.997320] env[68233]: DEBUG oslo_concurrency.lockutils [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.997498] env[68233]: DEBUG nova.compute.manager [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] No waiting events found dispatching network-vif-plugged-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1138.997601] env[68233]: WARNING nova.compute.manager [req-68aebb40-78f8-4b1f-b029-493d56ca89af req-c8d62bc4-1059-450a-8367-e5cadd984d1d service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received unexpected event network-vif-plugged-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c for instance with vm_state building and task_state spawning. 
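The repeated Acquiring lock / acquired / released triplets in this section come from oslo.concurrency's lockutils wrappers. A minimal sketch of the two forms visible here, using lock names taken from the log but placeholder function bodies; this is not the Nova code that actually holds them.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs with the named lock held; lockutils emits the
        # "acquired ... waited N.NNNs" and "released ... held N.NNNs" lines.
        pass

    def pop_instance_event(instance_uuid):
        # Context-manager form, as used for the per-instance "<uuid>-events"
        # lock when an external event such as network-vif-plugged arrives.
        with lockutils.lock('%s-events' % instance_uuid):
            pass

    update_usage()
    pop_instance_event('171da032-9aeb-4972-8ec7-4181e2667ac0')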
[ 1139.028189] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09465bc7-ce33-4ec5-8796-442cca60497e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.040713] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb43c80-10b7-4cbe-b43f-5ebd349db93a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.072547] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a5076b-529b-4ec1-84a0-7381161b263b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.080521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e0d01e-00d5-4c77-9ed3-8d3f66e1a7b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.096019] env[68233]: DEBUG nova.compute.provider_tree [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1139.126270] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Successfully updated port: d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1139.149600] env[68233]: INFO nova.compute.manager [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Took 17.31 seconds to build instance. 
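The INFO lines of the form "Took N seconds to ..." (8.69 seconds to spawn, 17.31 seconds to build, 1.02 seconds to deallocate network) are the simplest timing signal to mine from a log like this one. A small self-contained sketch that collects them per instance; the log file name is a placeholder.

    import re
    from collections import defaultdict

    PATTERN = re.compile(
        r'\[instance: (?P<uuid>[0-9a-f-]+)\] '
        r'Took (?P<secs>[\d.]+) seconds to (?P<what>[^.]+)\.')

    def build_timings(path):
        # Returns {instance uuid: [(action, seconds), ...]}.
        timings = defaultdict(list)
        with open(path) as log:
            for line in log:
                # finditer, because several entries can share one physical line.
                for match in PATTERN.finditer(line):
                    timings[match.group('uuid')].append(
                        (match.group('what'), float(match.group('secs'))))
        return timings

    # Placeholder usage:
    # for uuid, entries in build_timings('nova-compute.log').items():
    #     print(uuid, entries)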
[ 1139.318796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.597695] env[68233]: DEBUG nova.scheduler.client.report [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.631044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.631044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.631044] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.651309] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f527a518-3ba3-4b99-a966-882d628d2558 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.823s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.103583] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.106160] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.529s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.106363] env[68233]: DEBUG nova.objects.instance [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68233) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1140.121989] env[68233]: INFO nova.scheduler.client.report [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Deleted allocations for instance f53dccfc-9d0d-4eea-b94c-8527f707c5c2 [ 1140.178038] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1140.419926] env[68233]: DEBUG nova.network.neutron [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.632942] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4175a519-c851-433a-bdeb-ccaa37e0b8c4 tempest-ServerShowV247Test-1414150871 tempest-ServerShowV247Test-1414150871-project-member] Lock "f53dccfc-9d0d-4eea-b94c-8527f707c5c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.270s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.847150] env[68233]: DEBUG nova.compute.manager [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 
1140.924977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.925484] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Instance network_info: |[{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1140.925774] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:fa:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '130387c4-e4ec-4d95-8e9d-bb079baabad8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3ccdb5a-c47a-4c1a-bbab-97a2dab7185c', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.936633] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.937297] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.937909] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8570ed89-8203-4540-a8dd-4f6f91369955 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.963807] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.963807] env[68233]: value = "task-2783183" [ 1140.963807] env[68233]: _type = "Task" [ 1140.963807] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.973246] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783183, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.028551] env[68233]: DEBUG nova.compute.manager [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1141.029087] env[68233]: DEBUG nova.compute.manager [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing instance network info cache due to event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1141.029412] env[68233]: DEBUG oslo_concurrency.lockutils [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.029750] env[68233]: DEBUG oslo_concurrency.lockutils [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.030099] env[68233]: DEBUG nova.network.neutron [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.119229] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bd9e672d-6cd7-4ac4-9427-d34869fd994d tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.119229] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.978s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.119229] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.119229] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1141.120101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.328s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.121606] env[68233]: INFO nova.compute.claims [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1141.125756] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df79181-3b29-4a30-a5d6-025b915cdc14 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.134654] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb706bf-de60-4b4b-887a-437fc21376fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.156164] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d275a21c-e678-4899-9862-a9052cb0aacd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.165507] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c0cf86-f8a0-445e-b3dc-40fcefd45c5e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.202486] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180162MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1141.203378] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.374717] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.475170] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783183, 'name': CreateVM_Task, 'duration_secs': 0.407826} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.475277] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1141.475977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.476174] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.476501] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1141.476774] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33869c18-2fbf-452f-a246-c03fe0bfb737 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.481611] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1141.481611] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52627a69-f2f2-acac-b189-d46963431b92" [ 1141.481611] env[68233]: _type = "Task" [ 1141.481611] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.489635] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52627a69-f2f2-acac-b189-d46963431b92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.892164] env[68233]: DEBUG nova.network.neutron [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updated VIF entry in instance network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1141.892533] env[68233]: DEBUG nova.network.neutron [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.994948] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52627a69-f2f2-acac-b189-d46963431b92, 'name': SearchDatastore_Task, 'duration_secs': 0.026914} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.994948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.994948] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.995216] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.995367] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.995549] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.995813] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebf04d92-37a9-48cb-9ee2-d17b8ffde9d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.004051] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1142.004229] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1142.005077] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b10736d-da09-407f-9726-28b87eebb08f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.010426] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1142.010426] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52547c15-246d-bcfc-41c8-d5c3a882286c" [ 1142.010426] env[68233]: _type = "Task" [ 1142.010426] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.018148] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52547c15-246d-bcfc-41c8-d5c3a882286c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.212774] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1142.213090] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559525', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'name': 'volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2d04b37-3eae-46cb-a227-b62d36c62a6a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'serial': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1142.214031] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f07a06c-15ef-4992-8cfd-fa4f0a7baa73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.239583] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e4fb592-7af5-4330-8877-bf54545c8ffb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.270918] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 
volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3/volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.273959] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ebde839-8bb9-4d45-99e6-9cd33dd3def1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.293043] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1142.293043] env[68233]: value = "task-2783184" [ 1142.293043] env[68233]: _type = "Task" [ 1142.293043] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.303259] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783184, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.342689] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac090022-c08c-47e8-9327-f7cd302675c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.353574] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe36277-7a5e-4412-bcd7-5ecb72d87edc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.386388] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d200061-ad58-4976-82e4-3aa4fde29fa9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.394194] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a5a6bb-1e0e-493f-8f9b-73a7523f9e26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.400704] env[68233]: DEBUG oslo_concurrency.lockutils [req-3de9e77a-d16e-430f-a00d-11392af4b4de req-db7f56d0-a92a-4d5e-84b9-2de7c88bd2be service nova] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.411046] env[68233]: DEBUG nova.compute.provider_tree [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.521296] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca 
tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52547c15-246d-bcfc-41c8-d5c3a882286c, 'name': SearchDatastore_Task, 'duration_secs': 0.009662} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.522209] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a12ec1c-a0a4-4c79-a5de-b021ac97f4ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.528233] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1142.528233] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263d56c-83aa-8530-20e7-f14d8792259d" [ 1142.528233] env[68233]: _type = "Task" [ 1142.528233] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.536658] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263d56c-83aa-8530-20e7-f14d8792259d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.803266] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783184, 'name': ReconfigVM_Task, 'duration_secs': 0.378828} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.803560] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfigured VM instance instance-00000069 to attach disk [datastore2] volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3/volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1142.808488] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e632b598-19ac-4c74-97b8-163b815ab7cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.823852] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1142.823852] env[68233]: value = "task-2783185" [ 1142.823852] env[68233]: _type = "Task" [ 1142.823852] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.834446] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783185, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.940857] env[68233]: ERROR nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [req-41d10a3b-29e0-4568-8a36-48ca66156c90] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-41d10a3b-29e0-4568-8a36-48ca66156c90"}]} [ 1142.958691] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1142.972770] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1142.972992] env[68233]: DEBUG nova.compute.provider_tree [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1142.985076] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1143.004928] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1143.037745] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5263d56c-83aa-8530-20e7-f14d8792259d, 'name': SearchDatastore_Task, 'duration_secs': 0.027311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.040199] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.040466] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 171da032-9aeb-4972-8ec7-4181e2667ac0/171da032-9aeb-4972-8ec7-4181e2667ac0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1143.040896] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-744b55c3-7393-47e6-bc48-711ef16832df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.047635] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1143.047635] env[68233]: value = "task-2783186" [ 1143.047635] env[68233]: _type = "Task" [ 1143.047635] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.060025] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783186, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.164332] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42763de6-4a2d-4468-878b-814951e50ecd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.171597] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef820d44-d078-4011-bfd5-930c235c2e48 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.202586] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dbb27e-1754-4a9d-8057-2be62c88e63c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.210929] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615e068a-1831-4f27-8f31-b2d275084968 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.230033] env[68233]: DEBUG nova.compute.provider_tree [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1143.334098] env[68233]: DEBUG oslo_vmware.api [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783185, 'name': ReconfigVM_Task, 'duration_secs': 0.13688} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.334449] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559525', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'name': 'volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2d04b37-3eae-46cb-a227-b62d36c62a6a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'serial': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1143.557510] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783186, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488937} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.558757] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 171da032-9aeb-4972-8ec7-4181e2667ac0/171da032-9aeb-4972-8ec7-4181e2667ac0.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.558990] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.559268] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c5d10596-92b1-491a-bf31-156d1c8be848 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.566871] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1143.566871] env[68233]: value = "task-2783187" [ 1143.566871] env[68233]: _type = "Task" [ 1143.566871] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.579837] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783187, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.644105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.644105] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.750499] env[68233]: ERROR nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [req-471eb313-a626-4c95-b206-699c88078a1a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-471eb313-a626-4c95-b206-699c88078a1a"}]} [ 1143.768095] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1143.780497] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1143.780708] env[68233]: DEBUG nova.compute.provider_tree [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1143.790781] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1143.814091] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1143.970223] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3894f7d-0889-4372-9cab-3884bdda7026 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.977657] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22468438-ae55-462c-aa1d-e07120bf8f2f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.007168] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30bac29b-8b43-4a8f-95f6-f2ad6b374b8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.013913] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a64be45-37a1-416c-b46b-2392521a6c0b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.026952] env[68233]: DEBUG nova.compute.provider_tree [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.078383] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783187, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.294165} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.078738] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1144.079835] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550e6bfe-2d30-4fa2-bdcc-a94faf10ab8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.112715] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 171da032-9aeb-4972-8ec7-4181e2667ac0/171da032-9aeb-4972-8ec7-4181e2667ac0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1144.113577] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f53136c-c017-4e94-a5d3-1d554a028a82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.141935] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1144.141935] env[68233]: value = "task-2783188" [ 1144.141935] env[68233]: _type = "Task" [ 1144.141935] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.146393] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1144.151920] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783188, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.375674] env[68233]: DEBUG nova.objects.instance [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.529925] env[68233]: DEBUG nova.scheduler.client.report [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.652573] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783188, 'name': ReconfigVM_Task, 'duration_secs': 0.292436} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.654682] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 171da032-9aeb-4972-8ec7-4181e2667ac0/171da032-9aeb-4972-8ec7-4181e2667ac0.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.657243] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a745ddc2-4435-4a67-89e6-881976a4477d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.665088] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1144.665088] env[68233]: value = "task-2783189" [ 1144.665088] env[68233]: _type = "Task" [ 1144.665088] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.672697] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783189, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.673543] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.881029] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b68272e5-2c78-4164-beb6-e33bba84a507 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.287s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.994325] env[68233]: DEBUG oslo_concurrency.lockutils [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.994589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.994765] env[68233]: DEBUG nova.compute.manager [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.996177] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31070b18-647d-4620-b7c4-a6e5d7ede277 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.003103] env[68233]: DEBUG nova.compute.manager [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1145.003674] env[68233]: DEBUG nova.objects.instance [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.034516] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.915s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.035309] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1145.039831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.855s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.039831] env[68233]: DEBUG nova.objects.instance [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'resources' on Instance uuid 35587446-6f3b-465b-a2a6-0b154374734c {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1145.174796] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783189, 'name': Rename_Task, 'duration_secs': 0.141196} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.175094] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.175340] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-119baa9b-ff8a-4b8a-a6db-3208fef059f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.183037] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1145.183037] env[68233]: value = "task-2783190" [ 1145.183037] env[68233]: _type = "Task" [ 1145.183037] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.189966] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783190, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.543372] env[68233]: DEBUG nova.compute.utils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1145.547440] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1145.547602] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1145.596773] env[68233]: DEBUG nova.policy [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd2e45602ffbf4d66b6bfcac59f078e0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd32ae322ad5641b4bebd1aa390b5914f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1145.693098] env[68233]: DEBUG oslo_vmware.api [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783190, 'name': PowerOnVM_Task, 'duration_secs': 0.454198} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.695634] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1145.695845] env[68233]: INFO nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Took 7.06 seconds to spawn the instance on the hypervisor. 
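The records above trace one complete spawn of instance 171da032-9aeb-4972-8ec7-4181e2667ac0 on the VMware driver: the cached image VMDK is copied into the instance directory (CopyVirtualDisk_Task), the root disk is extended to 1048576 KB (ExtendVirtualDisk_Task), the disk is attached with a ReconfigVM_Task, the VM is renamed (Rename_Task) and finally powered on (PowerOnVM_Task), with every vCenter task polled until it reports completion ("Waiting for the task ... progress is N%"). The sketch below is only an illustrative reconstruction of that poll-until-done pattern and step ordering; FakeSession, FakeTask, wait_for_task and spawn_from_cached_image are hypothetical stand-ins, not the actual nova or oslo.vmware API.

# Illustrative sketch of the polling pattern and spawn step ordering seen in
# the log above. All class/function names here are hypothetical stand-ins.
import itertools
import time


class FakeTask:
    """Stands in for a vCenter task reference; finishes after a few polls."""

    def __init__(self, name, polls_until_done=3):
        self.name = name
        self._polls = itertools.count()
        self._polls_until_done = polls_until_done

    def info(self):
        done = next(self._polls) >= self._polls_until_done
        return {"state": "success" if done else "running",
                "progress": 100 if done else 0}


class FakeSession:
    """Pretends to submit vCenter tasks (CopyVirtualDisk_Task, etc.)."""

    def submit(self, task_name, **kwargs):
        print(f"Invoking {task_name} with {kwargs}")
        return FakeTask(task_name)


def wait_for_task(task, poll_interval=0.01):
    """Poll a task until it leaves the 'running' state, mirroring the
    'Waiting for the task ... progress is N%' records in the log."""
    while True:
        info = task.info()
        if info["state"] != "running":
            print(f"Task {task.name} completed: {info['state']}")
            return info
        print(f"Task {task.name} progress is {info['progress']}%")
        time.sleep(poll_interval)


def spawn_from_cached_image(session, datastore, image_id, instance_uuid,
                            root_size_kb):
    """Replays the step ordering visible above for the spawned instance."""
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    # 1. Copy the cached image VMDK into the instance directory.
    wait_for_task(session.submit("CopyVirtualDisk_Task",
                                 source=cached, dest=target))
    # 2. Extend the root virtual disk to the flavor's root disk size.
    wait_for_task(session.submit("ExtendVirtualDisk_Task",
                                 disk=target, new_size_kb=root_size_kb))
    # 3. Attach the copied disk to the VM via a reconfigure.
    wait_for_task(session.submit("ReconfigVM_Task",
                                 vm=instance_uuid, disk=target))
    # 4. Rename the VM, then power it on.
    wait_for_task(session.submit("Rename_Task", vm=instance_uuid))
    wait_for_task(session.submit("PowerOnVM_Task", vm=instance_uuid))


if __name__ == "__main__":
    spawn_from_cached_image(FakeSession(), "datastore2",
                            "da133fda-e1e2-42a1-a7e0-b8b1426a8490",
                            "171da032-9aeb-4972-8ec7-4181e2667ac0",
                            root_size_kb=1048576)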
[ 1145.696039] env[68233]: DEBUG nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1145.697151] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db133725-253f-43d0-9a75-60dc1b8fc008 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.709022] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee51001-5283-4e95-a35f-1cf354e01586 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.714496] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b362ed7e-8a6e-4db7-b8b6-34ec466339f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.745440] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491578b9-f75c-486d-b44f-943c5de164f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.754498] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c065102c-82ef-4d6b-b137-a97be1a0b719 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.769900] env[68233]: DEBUG nova.compute.provider_tree [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.953153] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Successfully created port: 3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1146.011243] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.011638] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e338dc7-a4ea-4b2a-ab0d-9fba900baa45 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.020457] env[68233]: DEBUG oslo_vmware.api [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1146.020457] env[68233]: value = "task-2783191" [ 1146.020457] env[68233]: _type = "Task" [ 1146.020457] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.029260] env[68233]: DEBUG oslo_vmware.api [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.048660] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1146.218721] env[68233]: INFO nova.compute.manager [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Took 17.27 seconds to build instance. [ 1146.273101] env[68233]: DEBUG nova.scheduler.client.report [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.530620] env[68233]: DEBUG oslo_vmware.api [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783191, 'name': PowerOffVM_Task, 'duration_secs': 0.203747} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.530899] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1146.531116] env[68233]: DEBUG nova.compute.manager [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.531886] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a21b54-bf9b-469b-bda5-dce0a00c3d7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.721641] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0292b77a-24ca-4302-bd5e-a4ad12d8beca tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.786s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.778731] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.739s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.781900] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.752s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.783017] env[68233]: INFO nova.compute.claims [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1146.801858] env[68233]: INFO nova.scheduler.client.report [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocations for instance 35587446-6f3b-465b-a2a6-0b154374734c [ 1147.042584] env[68233]: DEBUG oslo_concurrency.lockutils [None req-511f28be-c92f-4f83-953e-58665cddf008 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.064616] env[68233]: DEBUG nova.compute.manager [None 
req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1147.103584] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1147.103833] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1147.104044] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1147.104257] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1147.104409] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1147.104558] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1147.104776] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1147.104962] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1147.105160] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1147.105325] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1147.105501] env[68233]: DEBUG nova.virt.hardware [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1147.106792] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586de452-9ad0-4dc9-b512-91f8363e7353 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.116780] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6260d9d-7df5-4133-a0a1-58152b7f6348 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.223216] env[68233]: DEBUG nova.compute.manager [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1147.223605] env[68233]: DEBUG nova.compute.manager [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1147.224044] env[68233]: DEBUG oslo_concurrency.lockutils [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.224377] env[68233]: DEBUG oslo_concurrency.lockutils [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.224949] env[68233]: DEBUG nova.network.neutron [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1147.311986] env[68233]: DEBUG oslo_concurrency.lockutils [None req-07aafc91-873d-4eb4-9920-d83688082bd9 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "35587446-6f3b-465b-a2a6-0b154374734c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.778s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.641086] env[68233]: DEBUG nova.objects.instance [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.902105] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Successfully updated port: 3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1148.001421] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa1abf6-5e36-461c-a8bd-8aeb957e54cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.008915] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9274cfcd-816b-44a2-8cc3-268bd8d51b08 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.045142] env[68233]: DEBUG nova.network.neutron [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updated VIF entry in instance network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1148.046371] env[68233]: DEBUG nova.network.neutron [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.047181] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acfc029-78fa-4a3c-8b9d-f93d80397476 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.059419] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef61b132-edb1-4bd4-8506-7878803e46d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.075028] env[68233]: DEBUG nova.compute.provider_tree [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.145795] env[68233]: DEBUG oslo_concurrency.lockutils [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.146261] env[68233]: DEBUG oslo_concurrency.lockutils [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.146261] env[68233]: DEBUG nova.network.neutron [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Building 
network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.147056] env[68233]: DEBUG nova.objects.instance [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'info_cache' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1148.404425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.404425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.404425] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.555816] env[68233]: DEBUG oslo_concurrency.lockutils [req-9eb16b75-85dc-4848-92fd-ef2bb16bd450 req-9469a052-9356-40c4-8b86-abe3930ed1b9 service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.577869] env[68233]: DEBUG nova.scheduler.client.report [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.652868] env[68233]: DEBUG nova.objects.base [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1148.935822] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1149.059803] env[68233]: DEBUG nova.network.neutron [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.083746] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.084489] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1149.087223] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.769s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.087450] env[68233]: DEBUG nova.objects.instance [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lazy-loading 'resources' on Instance uuid 550a52f6-e11b-4d34-ad82-9b39d33780d7 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1149.257142] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1149.257142] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing instance network info cache due to event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1149.257251] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.257378] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.257596] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.368226] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "81e0800d-7731-433c-9238-b4aa07a4ddda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.369031] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.388971] env[68233]: DEBUG nova.network.neutron [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [{"id": "16ec4545-d69d-43bf-a956-54414f895c1e", "address": "fa:16:3e:82:05:b4", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ec4545-d6", "ovs_interfaceid": "16ec4545-d69d-43bf-a956-54414f895c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.562575] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.562964] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Instance network_info: |[{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1149.563386] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:28:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f0ccb34-9d4b-457b-8eb8-4110c1b41180', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1149.570723] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1149.570937] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1149.571181] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c284d747-efbc-400d-be1f-72acd0ddaa24 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.593215] env[68233]: DEBUG nova.compute.utils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1149.595712] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1149.595890] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1149.597458] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1149.597458] env[68233]: value = "task-2783192" [ 1149.597458] env[68233]: _type = "Task" [ 1149.597458] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.607308] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783192, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.633112] env[68233]: DEBUG nova.policy [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b450a75286a9438081aa60c4b5cfeab3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9df7c30630584a2bb79e798dcc571850', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1149.762295] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b21161-4ba3-46d9-a603-f1e1516f86ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.770591] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318f6803-2b97-4ddc-94ed-e2020312d60f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.804721] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bcc516b-2b50-4d08-9b21-04be820f152f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.812067] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715a2bc9-aae9-42c3-a749-82ecec46bc1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.825123] env[68233]: DEBUG nova.compute.provider_tree [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.870708] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1149.891019] env[68233]: DEBUG oslo_concurrency.lockutils [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.924453] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Successfully created port: b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1150.039820] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updated VIF entry in instance network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.040219] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.098930] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1150.116380] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783192, 'name': CreateVM_Task, 'duration_secs': 0.387045} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.116731] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1150.117554] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.117883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.118322] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1150.118774] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01e38ae9-7f8f-4f9c-9e37-7e25915fe068 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.126387] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1150.126387] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e8423-5c50-4d0a-7b34-fecfc0be9647" [ 1150.126387] env[68233]: _type = "Task" [ 1150.126387] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.133816] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e8423-5c50-4d0a-7b34-fecfc0be9647, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.328102] env[68233]: DEBUG nova.scheduler.client.report [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.392145] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.543300] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.543394] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Received event network-vif-plugged-3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1150.543652] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.543769] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.543984] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.544197] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] No waiting events found dispatching network-vif-plugged-3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1150.544369] env[68233]: WARNING nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Received unexpected event network-vif-plugged-3f0ccb34-9d4b-457b-8eb8-4110c1b41180 for instance with vm_state building and task_state spawning. [ 1150.544531] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Received event network-changed-3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1150.544690] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Refreshing instance network info cache due to event network-changed-3f0ccb34-9d4b-457b-8eb8-4110c1b41180. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1150.544884] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquiring lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.545055] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquired lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.545646] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Refreshing network info cache for port 3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1150.633892] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526e8423-5c50-4d0a-7b34-fecfc0be9647, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.634208] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.634410] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1150.634640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.634784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.634980] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.635302] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e6882e4b-6049-4da0-9d4e-27354b84fcce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.643256] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.643427] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1150.644122] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249d4d15-4c38-4775-8b5e-9a61da459c66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.649240] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1150.649240] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5235179d-2a6e-d7b8-2693-678b1b38c36f" [ 1150.649240] env[68233]: _type = "Task" [ 1150.649240] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.656845] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5235179d-2a6e-d7b8-2693-678b1b38c36f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.833563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.746s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.837045] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 9.634s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.853032] env[68233]: INFO nova.scheduler.client.report [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Deleted allocations for instance 550a52f6-e11b-4d34-ad82-9b39d33780d7 [ 1150.898305] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1150.899109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd32f14e-a7b6-4e72-ac95-9c124cc958ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.906117] env[68233]: DEBUG oslo_vmware.api [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1150.906117] env[68233]: value = "task-2783193" [ 1150.906117] env[68233]: _type = "Task" [ 1150.906117] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.913822] env[68233]: DEBUG oslo_vmware.api [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783193, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.112368] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1151.139615] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.139807] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.139844] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.142803] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.143080] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.143262] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.143484] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.143648] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.143818] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.144069] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.144233] env[68233]: DEBUG nova.virt.hardware [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.145148] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46abf57b-e701-4164-bcb3-899d36897cbf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.158034] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde692f2-138c-469c-a346-aab5590c4612 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.166286] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5235179d-2a6e-d7b8-2693-678b1b38c36f, 'name': SearchDatastore_Task, 'duration_secs': 0.008429} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.167496] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec97a2f9-58e2-4fad-beff-535bebe9641b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.183054] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1151.183054] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a80f2-eb19-ead4-3338-b93a2be94131" [ 1151.183054] env[68233]: _type = "Task" [ 1151.183054] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.190996] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a80f2-eb19-ead4-3338-b93a2be94131, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.291690] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updated VIF entry in instance network info cache for port 3f0ccb34-9d4b-457b-8eb8-4110c1b41180. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.292076] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.356923] env[68233]: DEBUG nova.compute.manager [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1151.357174] env[68233]: DEBUG oslo_concurrency.lockutils [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1151.357401] env[68233]: DEBUG oslo_concurrency.lockutils [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1151.357571] env[68233]: DEBUG oslo_concurrency.lockutils [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.357786] env[68233]: DEBUG nova.compute.manager [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] No waiting events found dispatching network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1151.357896] env[68233]: WARNING nova.compute.manager [req-074b058f-8444-4fd6-93ec-9032fa086d76 req-b0356607-1a53-47aa-83e2-c8d469cd99ee service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received unexpected event network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 for instance with vm_state building and task_state spawning. [ 1151.362895] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d81c3c5c-2dc8-41fa-8f29-357996afe959 tempest-ServersListShow296Test-754993983 tempest-ServersListShow296Test-754993983-project-member] Lock "550a52f6-e11b-4d34-ad82-9b39d33780d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.281s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.415589] env[68233]: DEBUG oslo_vmware.api [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783193, 'name': PowerOnVM_Task, 'duration_secs': 0.506122} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.415859] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1151.416080] env[68233]: DEBUG nova.compute.manager [None req-264339a4-9516-40a3-b049-2cc18de7f751 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1151.417081] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1afb0c5-5268-4ae5-9987-0d17789c68b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.510612] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Successfully updated port: b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1151.693591] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521a80f2-eb19-ead4-3338-b93a2be94131, 'name': SearchDatastore_Task, 'duration_secs': 0.012038} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.693865] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.694179] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1151.694452] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-640ed4c5-5976-4c2f-8136-48c895c43146 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.701785] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1151.701785] env[68233]: value = "task-2783194" [ 1151.701785] env[68233]: _type = "Task" [ 1151.701785] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.712011] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783194, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.797407] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Releasing lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.797407] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1151.797407] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing instance network info cache due to event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1151.797407] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.797407] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1151.797407] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.854032] env[68233]: INFO nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating resource usage from migration 91749297-59a7-44a7-b90c-8e7f6539e7c9 [ 1151.871982] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872169] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c2d04b37-3eae-46cb-a227-b62d36c62a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872312] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872451] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9f862347-508b-4c8a-a338-97972b0c0b0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872569] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 171da032-9aeb-4972-8ec7-4181e2667ac0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872681] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 151b16bc-6b78-4527-8571-b07b5ad7db7b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872792] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 863e15c6-caa4-47aa-902a-7be2c9538687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1151.872901] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Migration 91749297-59a7-44a7-b90c-8e7f6539e7c9 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1151.873026] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 5038002c-884f-4f75-a1fe-aa84220c9ea6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1152.014905] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.015125] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.015362] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.213082] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783194, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.376335] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance aadc7dbe-456c-4bf3-b26d-bac672459fb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1152.480438] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updated VIF entry in instance network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1152.480789] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.544573] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1152.659273] env[68233]: DEBUG nova.network.neutron [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.713901] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783194, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529257} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.714179] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1152.714393] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1152.714631] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58f1e2d8-e5fd-4d00-9d1c-5dfc0d11bdb4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.721153] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1152.721153] env[68233]: value = "task-2783195" [ 1152.721153] env[68233]: _type = "Task" [ 1152.721153] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.728587] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.882194] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 81e0800d-7731-433c-9238-b4aa07a4ddda has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1152.882511] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1152.882702] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1152.983968] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.984289] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1152.984457] env[68233]: DEBUG nova.compute.manager [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1152.984665] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.985363] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.985363] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.025958] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad766480-a435-468c-91c7-01ef616a279a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.033775] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a5b74e-f09b-445b-9f46-7bae0ea89507 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.063964] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-05394ce1-c10f-424d-98dd-fa150092fc00 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.071568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fd90024-da9b-4f14-93dc-d36f8df6e7aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.084843] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.162265] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.162602] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance network_info: |[{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1153.163031] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:a2:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4077afe-a7b6-4653-be23-4c735d67fa05', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.170544] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 
tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1153.170754] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1153.170975] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73d741fc-623e-4c47-b1ab-9d50a62ee860 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.192296] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.192296] env[68233]: value = "task-2783196" [ 1153.192296] env[68233]: _type = "Task" [ 1153.192296] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.199767] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783196, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.231834] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073801} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.232227] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1153.232994] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab605794-0339-4187-b290-0e8a066beafe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.256382] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1153.257031] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a963559c-fcb9-460a-9879-1151f5bb3ea2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.276517] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1153.276517] env[68233]: value = "task-2783197" [ 1153.276517] env[68233]: _type = "Task" [ 1153.276517] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.284640] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783197, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.389404] env[68233]: DEBUG nova.compute.manager [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1153.389558] env[68233]: DEBUG nova.compute.manager [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing instance network info cache due to event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1153.389857] env[68233]: DEBUG oslo_concurrency.lockutils [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.389952] env[68233]: DEBUG oslo_concurrency.lockutils [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.390147] env[68233]: DEBUG nova.network.neutron [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1153.588015] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1153.702643] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783196, 'name': CreateVM_Task, 'duration_secs': 0.451259} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.702843] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1153.703493] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.703661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.704033] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1153.704293] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0388bc5-2c09-4648-9f5f-470e709c8106 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.706527] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updated VIF entry in instance network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1153.706855] env[68233]: DEBUG nova.network.neutron [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.711681] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1153.711681] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52980ca3-cbbe-5c4a-1c59-6e5bb6a5814d" [ 1153.711681] env[68233]: _type = "Task" [ 1153.711681] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.719664] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52980ca3-cbbe-5c4a-1c59-6e5bb6a5814d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.786816] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783197, 'name': ReconfigVM_Task, 'duration_secs': 0.457117} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.787291] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1153.787757] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f5de505-c411-4f67-9064-a2bbb42539ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.793825] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1153.793825] env[68233]: value = "task-2783198" [ 1153.793825] env[68233]: _type = "Task" [ 1153.793825] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.802294] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783198, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.071037] env[68233]: DEBUG nova.network.neutron [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updated VIF entry in instance network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1154.071805] env[68233]: DEBUG nova.network.neutron [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.094216] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1154.094438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.258s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.094689] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.720s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.096034] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.096183] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1154.209772] env[68233]: DEBUG oslo_concurrency.lockutils [req-c777c4a0-dc24-4b43-9c21-e51c75479dee req-05f92ddf-28dd-4b51-822d-f2dd3e5c3e8f service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.225252] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52980ca3-cbbe-5c4a-1c59-6e5bb6a5814d, 'name': SearchDatastore_Task, 'duration_secs': 0.013506} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.225622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.225908] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.226204] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.226402] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.226633] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1154.226943] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-890ce98a-476f-44ef-931e-9abba7c8378a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.235329] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1154.235641] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1154.236279] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27703b49-f2d8-496f-aa2b-8f4e33a3f0b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.242263] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1154.242263] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b4c288-5eb5-8626-8656-ed09e9b655ce" [ 1154.242263] env[68233]: _type = "Task" [ 1154.242263] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.249836] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b4c288-5eb5-8626-8656-ed09e9b655ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.304569] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783198, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.574340] env[68233]: DEBUG oslo_concurrency.lockutils [req-1d0f14ba-4c3c-47bf-acfb-594c1ac9fb34 req-d5512ae8-cd2f-4765-80be-6ab07b166771 service nova] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.601049] env[68233]: INFO nova.compute.claims [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1154.619625] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] There are 67 instances to clean {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1154.619795] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 550a52f6-e11b-4d34-ad82-9b39d33780d7] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1154.753586] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b4c288-5eb5-8626-8656-ed09e9b655ce, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.753586] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-913b86ea-e727-43fd-8137-60c590c28f2f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.757788] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1154.757788] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dae4-041c-a04f-f08b-e12ac7bc8043" [ 1154.757788] env[68233]: _type = "Task" [ 1154.757788] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.766263] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dae4-041c-a04f-f08b-e12ac7bc8043, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.803616] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783198, 'name': Rename_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.124819] env[68233]: INFO nova.compute.resource_tracker [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating resource usage from migration 91749297-59a7-44a7-b90c-8e7f6539e7c9 [ 1155.127607] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 0d79ccd0-d24d-4200-9d34-f3a7f44370aa] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1155.256123] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938d710d-09e7-4002-afae-be5f3c0065a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.268638] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5218dae4-041c-a04f-f08b-e12ac7bc8043, 'name': SearchDatastore_Task, 'duration_secs': 0.010409} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.269557] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac26555-2b86-482f-b4c5-421d8e11691e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.272563] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.272822] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1155.273058] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0bc2d692-9d89-4471-ab0d-93f7e71137f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.308214] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8fe020-462e-45d1-af19-26cb9c9499ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.310981] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1155.310981] env[68233]: value = "task-2783199" [ 1155.310981] env[68233]: _type = "Task" [ 1155.310981] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.318771] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783198, 'name': Rename_Task, 'duration_secs': 1.464035} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.319413] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.320597] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2f9e7a-8b67-4465-8d55-e2ce78f96d7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.326646] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed0654f1-c303-4538-b235-29af9e4fc409 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.327981] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.337364] env[68233]: DEBUG nova.compute.provider_tree [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.339975] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1155.339975] env[68233]: value = "task-2783200" [ 1155.339975] env[68233]: _type = "Task" [ 1155.339975] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.347112] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.631354] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3af7ccd5-f36b-4596-baf6-ed890e89d6a1] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1155.820792] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783199, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.840138] env[68233]: DEBUG nova.scheduler.client.report [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1155.853429] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783200, 'name': PowerOnVM_Task} progress is 71%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.134775] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: ac108b76-385d-40c2-992c-dc7561227130] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1156.321962] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.741615} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.322278] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1156.322441] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1156.322676] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f7bc4aa-cf95-49d2-bb95-2a9c741e5c38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.329658] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1156.329658] env[68233]: value = "task-2783201" [ 1156.329658] env[68233]: _type = "Task" [ 1156.329658] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.336956] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.348896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.254s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.349127] env[68233]: INFO nova.compute.manager [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Migrating [ 1156.359610] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.686s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.361117] env[68233]: INFO nova.compute.claims [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1156.364092] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783200, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.639334] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: f53dccfc-9d0d-4eea-b94c-8527f707c5c2] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1156.839507] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093987} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.839718] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1156.840504] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50869ae5-b681-408c-aa96-302b8f2759f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.862665] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1156.865960] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7ce2b6c-29fc-41aa-a706-9b2aa51cf870 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.882721] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.882911] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1156.883073] env[68233]: DEBUG nova.network.neutron [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1156.890657] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783200, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.892862] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1156.892862] env[68233]: value = "task-2783202" [ 1156.892862] env[68233]: _type = "Task" [ 1156.892862] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.901678] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783202, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.142696] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 72c7e272-dd92-40a5-875b-3edfa1ad282b] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1157.362772] env[68233]: DEBUG oslo_vmware.api [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783200, 'name': PowerOnVM_Task, 'duration_secs': 1.594088} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.363150] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1157.363200] env[68233]: INFO nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1157.363355] env[68233]: DEBUG nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1157.364107] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6b47ff-6401-45e8-ab16-d92b5c2ea320 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.401567] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783202, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.531196] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3457d3b6-a368-4156-bd5b-e3c5583d6af0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.538491] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e3c449-1c07-4628-87dc-7c9c33fd1dc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.570417] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c77030f-3e96-45bb-a8de-fcf361dc62b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.578279] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556bec7b-d52a-4db1-b5ef-658faf332614 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.591609] env[68233]: DEBUG nova.compute.provider_tree [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.645509] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 7025be4e-b800-42c8-a2c0-3ea059d3b929] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1157.787033] env[68233]: DEBUG nova.network.neutron [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [{"id": "922f97ce-4e0e-42e9-b56b-1e312580276a", "address": "fa:16:3e:a5:03:5b", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap922f97ce-4e", "ovs_interfaceid": "922f97ce-4e0e-42e9-b56b-1e312580276a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.881012] env[68233]: INFO nova.compute.manager [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb 
tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Took 22.10 seconds to build instance. [ 1157.902602] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783202, 'name': ReconfigVM_Task, 'duration_secs': 0.82787} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.902902] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1157.903548] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5aedb723-2f2a-4e66-9594-f0eaade48ed4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.909166] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1157.909166] env[68233]: value = "task-2783203" [ 1157.909166] env[68233]: _type = "Task" [ 1157.909166] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.916454] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783203, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.095253] env[68233]: DEBUG nova.scheduler.client.report [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1158.148155] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 21cc2aa9-8c88-4aa1-8847-bf7f469ca991] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1158.289498] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.383481] env[68233]: DEBUG oslo_concurrency.lockutils [None req-78fcab7b-5942-4ed1-bc8a-46f14473d7bb tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.614s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.418846] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783203, 'name': Rename_Task, 'duration_secs': 0.155845} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.419142] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1158.419475] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c901535d-3dc3-46c3-b0b0-95d5ea399242 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.425284] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1158.425284] env[68233]: value = "task-2783204" [ 1158.425284] env[68233]: _type = "Task" [ 1158.425284] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.432733] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.599791] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.241s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.600397] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1158.603187] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.211s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.604480] env[68233]: INFO nova.compute.claims [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1158.650911] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 19cf6f80-ff11-4881-896e-9fc162ded31e] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1158.936580] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783204, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.108727] env[68233]: DEBUG nova.compute.utils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1159.113025] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1159.113230] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1159.150457] env[68233]: DEBUG nova.policy [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5879d5d831004ae3b4273284da66358d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd564a0ed01a84ffca782d1344faba070', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1159.153582] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9b7df182-5830-45a2-b50d-b3564a7e0b6c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1159.431877] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Successfully created port: 85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1159.438437] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783204, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.617267] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1159.656689] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: e95e2309-1df5-466b-bb8a-0c9188dc07c2] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1159.700642] env[68233]: DEBUG nova.compute.manager [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1159.782808] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e630f5f5-13f1-4406-a164-2bb1e101dca3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.790536] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ad6655-3a49-43de-9f91-76cf602b94a2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.824238] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd27d81-ca48-447f-9a82-7cea49fe79a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.827248] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096b5e23-9688-4f00-b70c-8ec074e6cc8f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.846517] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18726d5a-eace-4ade-a974-072e30ff1fcf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.850557] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1159.863198] env[68233]: DEBUG nova.compute.provider_tree [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.936431] env[68233]: DEBUG oslo_vmware.api [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783204, 'name': PowerOnVM_Task, 'duration_secs': 1.086903} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.936746] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1159.936975] env[68233]: INFO nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Took 8.82 seconds to spawn the instance on the hypervisor. [ 1159.937199] env[68233]: DEBUG nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1159.938071] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60204f71-7da7-4317-967e-976b60b03ec2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.160384] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 619230c4-f642-4835-8c5a-84ece6610e0f] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1160.219957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.356789] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1160.356846] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74bb5c6e-001c-4456-93d4-dc929918cf03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.364747] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1160.364747] env[68233]: value = "task-2783205" [ 1160.364747] env[68233]: _type = "Task" [ 1160.364747] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.365563] env[68233]: DEBUG nova.scheduler.client.report [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.380970] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.457130] env[68233]: INFO nova.compute.manager [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Took 22.45 seconds to build instance. [ 1160.628063] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1160.652537] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1160.652795] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.652951] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1160.653154] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.654173] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1160.654173] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1160.654173] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1160.654173] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1160.654173] env[68233]: DEBUG 
nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1160.654404] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1160.654629] env[68233]: DEBUG nova.virt.hardware [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1160.655396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3268b76b-85c9-4d5b-a409-77e89711e4a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.663871] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e37ad1d-ae28-4658-bead-a616da4cc3fc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.667841] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 5d99e0cb-9742-4a6c-84d0-f8d916ef9104] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1160.876026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.273s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.877033] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1160.879609] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783205, 'name': PowerOffVM_Task, 'duration_secs': 0.220809} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.879881] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.660s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.883017] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1160.883017] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1160.905771] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Successfully updated port: 85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1160.959505] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fac21d0c-01ca-4864-ab57-57bd65a9d550 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.957s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.107984] env[68233]: DEBUG nova.compute.manager [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Received event network-vif-plugged-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.108374] env[68233]: DEBUG oslo_concurrency.lockutils [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.108616] env[68233]: DEBUG oslo_concurrency.lockutils [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.108893] env[68233]: DEBUG oslo_concurrency.lockutils [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] Lock 
"aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.109532] env[68233]: DEBUG nova.compute.manager [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] No waiting events found dispatching network-vif-plugged-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1161.109689] env[68233]: WARNING nova.compute.manager [req-032381b7-9c98-4b37-9fcc-f00e56fec0bd req-c45dc036-838b-4077-82d5-6163ee30491e service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Received unexpected event network-vif-plugged-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 for instance with vm_state building and task_state spawning. [ 1161.126633] env[68233]: DEBUG nova.compute.manager [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1161.126633] env[68233]: DEBUG nova.compute.manager [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing instance network info cache due to event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1161.126633] env[68233]: DEBUG oslo_concurrency.lockutils [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.126633] env[68233]: DEBUG oslo_concurrency.lockutils [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.126881] env[68233]: DEBUG nova.network.neutron [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1161.171501] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 4922985d-ad04-4c34-8dcb-6e6f8df94ff9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1161.380799] env[68233]: DEBUG nova.compute.utils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1161.382359] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 
81e0800d-7731-433c-9238-b4aa07a4ddda] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1161.382530] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1161.387522] env[68233]: INFO nova.compute.claims [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 
tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1161.396693] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1161.405021] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6992881d-e85a-47ac-b5d5-9e4882d612d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.414831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.414831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.414831] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.421426] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1161.421426] env[68233]: value = "task-2783206" [ 1161.421426] env[68233]: _type = "Task" [ 1161.421426] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.432356] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783206, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.443362] env[68233]: DEBUG nova.policy [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da4cb00bd4c3405c88d8616b66b71e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14d2a0ead80a4efba8420023c31f8f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1161.676884] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 35587446-6f3b-465b-a2a6-0b154374734c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1161.712027] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Successfully created port: f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1161.850466] env[68233]: DEBUG nova.network.neutron [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updated VIF entry in instance network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1161.850826] env[68233]: DEBUG nova.network.neutron [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.893495] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1161.903467] env[68233]: INFO nova.compute.resource_tracker [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating resource usage from migration 6aeb657c-4d3c-4cd3-9a45-8b4449043aa4 [ 1161.932415] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783206, 'name': ReconfigVM_Task, 'duration_secs': 0.201586} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.937070] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1162.081284] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6024557c-1d5f-48cd-b99c-4b7057ff07ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.089088] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340a6735-ace6-45ed-b8b0-7d005dfc15de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.118996] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f889d3b-16ce-4bf1-964f-5fc456ee644e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.126363] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dd987e-eaf5-40b6-82e3-ccd2c6bbd859 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.140784] env[68233]: DEBUG nova.compute.provider_tree [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1162.184092] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 0bde10dc-6762-49fb-9c0d-6b104a3cfa39] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1162.213775] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1162.353931] env[68233]: DEBUG oslo_concurrency.lockutils [req-f36f87b2-8554-494d-9ac5-35cecb86a3d4 req-6d0ab3a0-015b-4160-85b1-dd00086eae54 service nova] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.392336] env[68233]: DEBUG nova.network.neutron [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating instance_info_cache with network_info: [{"id": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "address": "fa:16:3e:40:fa:ec", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85aae8a7-b4", "ovs_interfaceid": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.442372] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1162.443120] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1162.443120] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1162.443120] 
env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1162.443281] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1162.443281] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1162.443980] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1162.443980] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1162.443980] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1162.444218] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1162.444269] env[68233]: DEBUG nova.virt.hardware [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1162.449427] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.449972] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9019e79c-8139-4b56-b89a-a8892438ee8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.469846] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd 
tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1162.469846] env[68233]: value = "task-2783207" [ 1162.469846] env[68233]: _type = "Task" [ 1162.469846] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.478019] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783207, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.644685] env[68233]: DEBUG nova.scheduler.client.report [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1162.688205] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: a6b913f8-8ce5-4227-b36c-bc191d2e7907] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1162.894638] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.894970] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Instance network_info: |[{"id": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "address": "fa:16:3e:40:fa:ec", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85aae8a7-b4", "ovs_interfaceid": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1162.895464] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:fa:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85aae8a7-b4dc-4227-8b64-2e08fa7fa580', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.902817] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating folder: Project (d564a0ed01a84ffca782d1344faba070). Parent ref: group-v559223. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1162.903820] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1162.905944] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6aa7695-94f1-40af-b92a-b851b875b4c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.917938] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Created folder: Project (d564a0ed01a84ffca782d1344faba070) in parent group-v559223. [ 1162.918131] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating folder: Instances. Parent ref: group-v559529. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1162.918358] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26e86eb3-9c2e-4cf1-85ed-0a66b96ab4b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.927367] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Created folder: Instances in parent group-v559529. [ 1162.927594] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.927777] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.927970] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0d10838-f040-4734-b49a-c5224c4744ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.943988] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1162.944264] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1162.944424] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1162.944605] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1162.944749] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1162.944893] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1162.945140] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1162.945313] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1162.945482] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1162.945641] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1162.945814] env[68233]: DEBUG nova.virt.hardware [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1162.946684] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7a17b1-3437-40d0-83d9-32d82bdc25c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.955043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95acc7d9-c935-439d-af0d-007ef85edfa6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.958860] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.958860] env[68233]: value = "task-2783210" [ 1162.958860] env[68233]: _type = "Task" [ 1162.958860] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.977201] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783210, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.981637] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783207, 'name': ReconfigVM_Task, 'duration_secs': 0.172022} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.981885] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1162.982667] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbea1ca-5570-40ac-9979-f9799e79b332 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.005777] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1163.006068] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b1a545b-ce00-44ef-9c05-0255f18b1c4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.023589] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1163.023589] env[68233]: value = "task-2783211" [ 1163.023589] env[68233]: _type = "Task" [ 1163.023589] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.031231] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.137956] env[68233]: DEBUG nova.compute.manager [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Received event network-changed-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1163.138232] env[68233]: DEBUG nova.compute.manager [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Refreshing instance network info cache due to event network-changed-85aae8a7-b4dc-4227-8b64-2e08fa7fa580. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1163.138449] env[68233]: DEBUG oslo_concurrency.lockutils [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Acquiring lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.138589] env[68233]: DEBUG oslo_concurrency.lockutils [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Acquired lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.138746] env[68233]: DEBUG nova.network.neutron [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Refreshing network info cache for port 85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1163.151272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.271s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.151454] env[68233]: INFO nova.compute.manager [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Migrating [ 1163.157937] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.158168] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.158478] env[68233]: DEBUG nova.objects.instance [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.160314] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Successfully updated port: f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1163.191876] 
env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: d926386c-8543-4a6e-a782-588680cb5f34] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1163.469299] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783210, 'name': CreateVM_Task} progress is 25%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.532630] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783211, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.664043] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.664043] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.664191] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.672191] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.672389] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.672562] env[68233]: DEBUG nova.network.neutron [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.694278] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 85313d15-04da-4f24-b203-bed5ebcbe1a9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1163.816695] env[68233]: DEBUG nova.objects.instance [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 
tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.911215] env[68233]: DEBUG nova.network.neutron [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updated VIF entry in instance network info cache for port 85aae8a7-b4dc-4227-8b64-2e08fa7fa580. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1163.911573] env[68233]: DEBUG nova.network.neutron [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating instance_info_cache with network_info: [{"id": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "address": "fa:16:3e:40:fa:ec", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85aae8a7-b4", "ovs_interfaceid": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.968723] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783210, 'name': CreateVM_Task, 'duration_secs': 0.936484} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.968962] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1163.969696] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1163.969952] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1163.970313] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1163.970603] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-883a8b94-c42f-491c-a83d-8a23ff291d4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.974930] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1163.974930] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520eb9fc-1308-6f20-7a62-cda3a730dbad" [ 1163.974930] env[68233]: _type = "Task" [ 1163.974930] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.982080] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520eb9fc-1308-6f20-7a62-cda3a730dbad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.032861] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783211, 'name': ReconfigVM_Task, 'duration_secs': 0.925273} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.033160] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6/5038002c-884f-4f75-a1fe-aa84220c9ea6.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1164.033477] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1164.197299] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1164.199232] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 56fb49f0-4b2b-4501-8ded-34dff1278a0c] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1164.319863] env[68233]: DEBUG nova.objects.base [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1164.320084] env[68233]: DEBUG nova.network.neutron [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1164.333129] env[68233]: DEBUG nova.network.neutron [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updating instance_info_cache with network_info: [{"id": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "address": "fa:16:3e:52:2e:6f", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5288ad5-2e", "ovs_interfaceid": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.398245] env[68233]: DEBUG nova.network.neutron [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.411873] env[68233]: DEBUG nova.policy [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1164.414098] env[68233]: DEBUG oslo_concurrency.lockutils [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Releasing lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.414326] env[68233]: DEBUG nova.compute.manager [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Received event network-vif-plugged-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1164.414567] env[68233]: DEBUG oslo_concurrency.lockutils 
[req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Acquiring lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1164.414778] env[68233]: DEBUG oslo_concurrency.lockutils [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1164.414945] env[68233]: DEBUG oslo_concurrency.lockutils [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.415134] env[68233]: DEBUG nova.compute.manager [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] No waiting events found dispatching network-vif-plugged-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1164.415314] env[68233]: WARNING nova.compute.manager [req-db960e26-dcec-4896-aecd-2b83a61a8f95 req-70687b8e-2314-420a-97d0-b326a32168ff service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Received unexpected event network-vif-plugged-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 for instance with vm_state building and task_state spawning. [ 1164.486226] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520eb9fc-1308-6f20-7a62-cda3a730dbad, 'name': SearchDatastore_Task, 'duration_secs': 0.023284} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.486527] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.486757] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1164.487115] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.487228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.487320] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1164.487573] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a49380a-2c4d-428f-af8b-e56c5b8fc9cd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.508670] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1164.508945] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1164.509786] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd4d1fb3-a9b0-496f-8736-8c4131aab907 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.515789] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1164.515789] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c80073-99a6-5d3a-9859-75e9730909f1" [ 1164.515789] env[68233]: _type = "Task" [ 1164.515789] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.523370] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c80073-99a6-5d3a-9859-75e9730909f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.540530] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6818b3f6-5589-4128-abee-7d256e7c95e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.560100] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27abf9b-f931-40d2-b99f-8251c83dc814 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.578316] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1164.703141] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: edf4bfac-175b-40b7-bf08-298c4735bfae] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1164.827936] env[68233]: DEBUG nova.compute.manager [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1164.836025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.836277] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 
81e0800d-7731-433c-9238-b4aa07a4ddda] Instance network_info: |[{"id": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "address": "fa:16:3e:52:2e:6f", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5288ad5-2e", "ovs_interfaceid": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1164.836658] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:2e:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5288ad5-2e52-4994-bfa9-ff2e77f3fe10', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1164.844431] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.844944] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1164.845205] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7f5256a-adf1-4421-98d3-c3af874e71dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.867417] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1164.867417] env[68233]: value = "task-2783212" [ 1164.867417] env[68233]: _type = "Task" [ 1164.867417] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.875595] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783212, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.901101] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.026300] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c80073-99a6-5d3a-9859-75e9730909f1, 'name': SearchDatastore_Task, 'duration_secs': 0.009358} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.027106] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-135b3de7-c684-4a93-9986-9be47eb15ef8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.031948] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1165.031948] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d31102-c2f9-c52d-f3af-9b79799366e9" [ 1165.031948] env[68233]: _type = "Task" [ 1165.031948] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.039105] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d31102-c2f9-c52d-f3af-9b79799366e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.115809] env[68233]: DEBUG nova.network.neutron [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Port 922f97ce-4e0e-42e9-b56b-1e312580276a binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1165.170404] env[68233]: DEBUG nova.compute.manager [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Received event network-changed-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1165.170611] env[68233]: DEBUG nova.compute.manager [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Refreshing instance network info cache due to event network-changed-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1165.170904] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] Acquiring lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.171460] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] Acquired lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.171653] env[68233]: DEBUG nova.network.neutron [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Refreshing network info cache for port f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1165.206421] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 287df4d5-4e98-464d-8f0a-4571c1e4df4f] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1165.348255] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.348543] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.377383] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783212, 'name': CreateVM_Task, 'duration_secs': 0.400568} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.377548] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1165.378198] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.378361] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.378675] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1165.378910] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aaa379b2-dfd9-4c88-bac2-c4eb41789393 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.383319] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1165.383319] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5255d950-0bb5-7397-e8ac-c601cbe653d7" [ 1165.383319] env[68233]: _type = "Task" [ 1165.383319] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.390638] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5255d950-0bb5-7397-e8ac-c601cbe653d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.542226] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d31102-c2f9-c52d-f3af-9b79799366e9, 'name': SearchDatastore_Task, 'duration_secs': 0.011345} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.542486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.542737] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] aadc7dbe-456c-4bf3-b26d-bac672459fb9/aadc7dbe-456c-4bf3-b26d-bac672459fb9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1165.542979] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cffb4a3-9af7-4fb5-ab05-660f36bd7cf6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.549799] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1165.549799] env[68233]: value = "task-2783213" [ 1165.549799] env[68233]: _type = "Task" [ 1165.549799] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.557250] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.709764] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9c0e581d-5856-470f-a737-301649d701e5] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1165.860363] env[68233]: INFO nova.compute.claims [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1165.900891] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5255d950-0bb5-7397-e8ac-c601cbe653d7, 'name': SearchDatastore_Task, 'duration_secs': 0.019483} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.901664] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.901968] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1165.902232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.904621] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.904621] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1165.904621] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4519d668-d020-42e1-bf2f-8b2f56c67769 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.920396] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1165.920726] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1165.922690] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c44c41bc-75aa-49b0-9233-1167db4d2942 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.932359] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1165.932359] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52358856-4eae-d1de-cda5-0976ac5f4563" [ 1165.932359] env[68233]: _type = "Task" [ 1165.932359] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.943150] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52358856-4eae-d1de-cda5-0976ac5f4563, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.059673] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783213, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.113166] env[68233]: DEBUG nova.network.neutron [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updated VIF entry in instance network info cache for port f5288ad5-2e52-4994-bfa9-ff2e77f3fe10. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.113546] env[68233]: DEBUG nova.network.neutron [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updating instance_info_cache with network_info: [{"id": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "address": "fa:16:3e:52:2e:6f", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5288ad5-2e", "ovs_interfaceid": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.136626] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.136859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.137041] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.198964] env[68233]: DEBUG nova.compute.manager [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1166.199199] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] Acquiring lock 
"03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.199468] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.199727] env[68233]: DEBUG oslo_concurrency.lockutils [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.199727] env[68233]: DEBUG nova.compute.manager [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] No waiting events found dispatching network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1166.199941] env[68233]: WARNING nova.compute.manager [req-fd8ff60f-4954-4c63-b579-c125a2ad5d6b req-2098bc53-5eaa-43ca-aa0d-1e8d712b2e65 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received unexpected event network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e for instance with vm_state active and task_state None. [ 1166.213293] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 62cd066c-5eac-4f07-bf4e-9275fedc7384] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1166.258413] env[68233]: DEBUG nova.network.neutron [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Successfully updated port: 9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1166.369215] env[68233]: INFO nova.compute.resource_tracker [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating resource usage from migration 2700e9c7-07ee-4466-a2cd-1549a06a554f [ 1166.417687] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4896d69c-03da-47e3-b76e-909e48cce3da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.439657] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1166.454240] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d 
tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52358856-4eae-d1de-cda5-0976ac5f4563, 'name': SearchDatastore_Task, 'duration_secs': 0.055061} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.455136] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59824224-38b1-4e1c-a4b6-38ff1ec75481 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.462021] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1166.462021] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e48a4-0aaa-ebb9-3d8e-13bb6ca55719" [ 1166.462021] env[68233]: _type = "Task" [ 1166.462021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.468872] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e48a4-0aaa-ebb9-3d8e-13bb6ca55719, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.559734] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783213, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533555} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.560725] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] aadc7dbe-456c-4bf3-b26d-bac672459fb9/aadc7dbe-456c-4bf3-b26d-bac672459fb9.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1166.560949] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1166.561669] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27561cf4-1b4f-474c-a1b6-a23e4b3cc450 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.564007] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5265facc-aa12-413f-999d-9473ca546804 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.569655] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282a15c2-fa0c-40a9-8fb4-4e1c6d5e89e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.573376] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1166.573376] env[68233]: value = "task-2783214" [ 1166.573376] env[68233]: _type = "Task" [ 1166.573376] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.601940] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06c319f-6bd4-42a3-bdab-bc5a0e4707c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.607140] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783214, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.611722] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fbd808-8809-4c36-83c5-9b32b3d25d97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.616035] env[68233]: DEBUG oslo_concurrency.lockutils [req-e0e9644d-50aa-477f-b9f2-ad25e5e21261 req-30003517-c292-4e86-bd9f-8694a5663d30 service nova] Releasing lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.625799] env[68233]: DEBUG nova.compute.provider_tree [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.716024] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 7831d420-5a0a-4901-b7fe-95307b4b61f0] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1166.763156] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.763352] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.763528] env[68233]: DEBUG nova.network.neutron [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1166.951116] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1166.951513] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-774fbb27-0fbb-40e9-86e3-da665ec61be7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.959223] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1166.959223] env[68233]: value = "task-2783215" [ 1166.959223] env[68233]: _type = "Task" [ 1166.959223] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.970475] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.973927] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523e48a4-0aaa-ebb9-3d8e-13bb6ca55719, 'name': SearchDatastore_Task, 'duration_secs': 0.017809} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.974469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.974520] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 81e0800d-7731-433c-9238-b4aa07a4ddda/81e0800d-7731-433c-9238-b4aa07a4ddda.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1166.974745] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43f29e6b-1e10-4367-803f-8171288d5985 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.983508] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1166.983508] env[68233]: value = "task-2783216" [ 1166.983508] env[68233]: _type = "Task" [ 1166.983508] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.991975] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783216, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.083571] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783214, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064559} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.086127] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1167.086127] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7880ee4e-be53-48d6-abf5-bfafe68c68f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.106503] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] aadc7dbe-456c-4bf3-b26d-bac672459fb9/aadc7dbe-456c-4bf3-b26d-bac672459fb9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1167.106787] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d77ff9c-672a-4893-a09b-ec983da80b7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.125986] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1167.125986] env[68233]: value = "task-2783217" [ 1167.125986] env[68233]: _type = "Task" [ 1167.125986] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.130027] env[68233]: DEBUG nova.scheduler.client.report [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1167.138013] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783217, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.175061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.175282] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.175467] env[68233]: DEBUG nova.network.neutron [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1167.220575] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 0b1065c2-7923-4dc4-a64f-be72a7994472] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1167.305218] env[68233]: WARNING nova.network.neutron [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. ignoring it [ 1167.469842] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783215, 'name': PowerOffVM_Task, 'duration_secs': 0.290973} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.470126] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.470315] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1167.493426] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783216, 'name': CopyVirtualDisk_Task} progress is 51%. 
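[annotation] The inventory dict reported just above is what the resource tracker hands to Placement, and the resize claim for instance 9f862347-508b-4c8a-a338-97972b0c0b0b is judged against it. Placement treats the usable capacity of each resource class as (total - reserved) * allocation_ratio and caps any single allocation at max_unit. A small worked sketch using the numbers from this report (the function is illustrative, not Placement's code):

    # Capacity arithmetic applied to the inventory reported above.
    def capacity(total, reserved, allocation_ratio):
        return int((total - reserved) * allocation_ratio)

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 175},
    }

    for rc, inv in inventory.items():
        # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable units, at most 16 per instance.
        print(rc, capacity(inv['total'], inv['reserved'], inv['allocation_ratio']),
              'max per allocation:', inv['max_unit'])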
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.635416] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.287s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.635559] env[68233]: INFO nova.compute.manager [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Migrating [ 1167.641610] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783217, 'name': ReconfigVM_Task, 'duration_secs': 0.424589} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.643192] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfigured VM instance instance-00000075 to attach disk [datastore2] aadc7dbe-456c-4bf3-b26d-bac672459fb9/aadc7dbe-456c-4bf3-b26d-bac672459fb9.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1167.645616] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c63bbad-713a-4dfa-ad1a-5256e3fb3d09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.659840] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1167.659840] env[68233]: value = "task-2783218" [ 1167.659840] env[68233]: _type = "Task" [ 1167.659840] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.670277] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783218, 'name': Rename_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.688490] env[68233]: DEBUG nova.network.neutron [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "address": "fa:16:3e:90:68:d0", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3396e5-37", "ovs_interfaceid": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.724212] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 32e05800-e812-412a-b049-89178737cffd] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1167.904578] env[68233]: DEBUG nova.network.neutron [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [{"id": 
"922f97ce-4e0e-42e9-b56b-1e312580276a", "address": "fa:16:3e:a5:03:5b", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap922f97ce-4e", "ovs_interfaceid": "922f97ce-4e0e-42e9-b56b-1e312580276a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.977591] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1167.977591] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1167.977845] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1167.977845] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1167.978611] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1167.978611] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 
tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1167.978611] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1167.978611] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1167.978858] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1167.978858] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1167.978961] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1167.984207] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0828bea4-61de-41ab-b8e3-1341663de8d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.006046] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783216, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.617813} completed successfully. 
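[annotation] The hardware.py lines above walk the CPU-topology selection for the 1-vCPU m1.micro flavor: with no flavor or image limits, the only topology whose sockets * cores * threads equals the vCPU count is (1, 1, 1), which is why exactly one possible topology is found and chosen. A toy reduction of that search, illustrative only and not Nova's implementation:

    # Toy version of the topology enumeration logged above.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield VirtCPUTopology(sockets, cores, threads)

    print(list(possible_topologies(1)))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)] -- the single topology chosen in the log.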
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.006046] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 81e0800d-7731-433c-9238-b4aa07a4ddda/81e0800d-7731-433c-9238-b4aa07a4ddda.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1168.006232] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1168.006473] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1168.006473] env[68233]: value = "task-2783219" [ 1168.006473] env[68233]: _type = "Task" [ 1168.006473] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.006621] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9429313a-7712-4315-8bcd-9aa4591e4f9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.017569] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783219, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.018770] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1168.018770] env[68233]: value = "task-2783220" [ 1168.018770] env[68233]: _type = "Task" [ 1168.018770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.026205] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783220, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.156298] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.156464] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.156649] env[68233]: DEBUG nova.network.neutron [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1168.170984] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783218, 'name': Rename_Task, 'duration_secs': 0.137646} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.171247] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1168.171484] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0833ab8c-41d2-45bf-9ff5-60ec97235197 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.177189] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1168.177189] env[68233]: value = "task-2783221" [ 1168.177189] env[68233]: _type = "Task" [ 1168.177189] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.184668] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783221, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.191239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.191911] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.192121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.192846] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-443b49ab-8b81-4c78-b00c-9084dc831d6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.209790] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1168.210035] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1168.210203] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1168.210384] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1168.210528] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1168.210671] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1168.210873] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1168.211040] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1168.211214] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1168.211376] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1168.211547] env[68233]: DEBUG nova.virt.hardware [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1168.217918] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfiguring VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1168.218278] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64318da5-0bda-4061-81b2-b9d28229d11a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.230730] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 827711ac-ef52-41a0-9029-0a1805522a08] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1168.238897] env[68233]: DEBUG oslo_vmware.api [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1168.238897] env[68233]: value = "task-2783222" [ 1168.238897] env[68233]: _type = "Task" [ 1168.238897] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.247935] env[68233]: DEBUG oslo_vmware.api [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783222, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.308752] env[68233]: DEBUG nova.compute.manager [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1168.308917] env[68233]: DEBUG nova.compute.manager [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-9b3396e5-37ec-49f5-9da5-1c9cc423a97e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1168.309155] env[68233]: DEBUG oslo_concurrency.lockutils [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.309297] env[68233]: DEBUG oslo_concurrency.lockutils [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.309458] env[68233]: DEBUG nova.network.neutron [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.410076] env[68233]: DEBUG oslo_concurrency.lockutils [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.518615] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783219, 'name': ReconfigVM_Task, 'duration_secs': 0.374999} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.519060] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1168.530556] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783220, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068727} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.530821] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1168.531583] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-506cb199-3b23-4edd-9e0d-44ea28a34b72 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.553786] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 81e0800d-7731-433c-9238-b4aa07a4ddda/81e0800d-7731-433c-9238-b4aa07a4ddda.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.554086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5a7e6a7-2f3a-480f-a62b-4d73d43cdb6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.574910] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1168.574910] env[68233]: value = "task-2783223" [ 1168.574910] env[68233]: _type = "Task" [ 1168.574910] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.583769] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783223, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.687087] env[68233]: DEBUG oslo_vmware.api [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783221, 'name': PowerOnVM_Task, 'duration_secs': 0.482348} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.687087] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1168.687087] env[68233]: INFO nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Took 8.06 seconds to spawn the instance on the hypervisor. [ 1168.687327] env[68233]: DEBUG nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1168.687860] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751745e1-a7f4-41c6-9442-e302726084b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.734716] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 73ca71c0-34cd-4393-82ff-4b297d350209] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1168.749175] env[68233]: DEBUG oslo_vmware.api [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783222, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.939789] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07db2b1a-f045-41ea-8c13-f286cfd87245 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.963885] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7a9f0f-5edf-48d7-990c-b3a842a28f06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.971031] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1169.027398] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1169.027822] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1169.027822] env[68233]: DEBUG nova.virt.hardware [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1169.033074] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1169.034225] env[68233]: DEBUG nova.network.neutron [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": 
"4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.035691] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c55ee833-41b1-468d-8914-543d49629b69 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.055257] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1169.055257] env[68233]: value = "task-2783224" [ 1169.055257] env[68233]: _type = "Task" [ 1169.055257] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.065964] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783224, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.084099] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783223, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.158606] env[68233]: DEBUG nova.network.neutron [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updated VIF entry in instance network info cache for port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1169.159164] env[68233]: DEBUG nova.network.neutron [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "address": "fa:16:3e:90:68:d0", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3396e5-37", "ovs_interfaceid": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.206372] env[68233]: INFO nova.compute.manager [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Took 24.55 seconds to build instance. 
[ 1169.237624] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3f79709a-b8b7-4838-8731-d051155ff4f3] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1169.250959] env[68233]: DEBUG oslo_vmware.api [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783222, 'name': ReconfigVM_Task, 'duration_secs': 0.908058} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.251562] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.251857] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfigured VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1169.479802] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1169.480136] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe03c947-646a-4437-9a83-1b00e47836dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.487360] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1169.487360] env[68233]: value = "task-2783225" [ 1169.487360] env[68233]: _type = "Task" [ 1169.487360] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.495071] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783225, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.550066] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.564529] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783224, 'name': ReconfigVM_Task, 'duration_secs': 0.234727} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.565434] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1169.566251] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2800dd1d-7857-44e6-b0ac-64f262fa4562 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.590037] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfiguring VM instance instance-00000073 to attach disk [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1169.593154] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a1878b2-6b29-4286-adc6-15cc1a8afae2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.610714] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783223, 'name': ReconfigVM_Task, 'duration_secs': 0.599954} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.612054] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 81e0800d-7731-433c-9238-b4aa07a4ddda/81e0800d-7731-433c-9238-b4aa07a4ddda.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1169.612626] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1169.612626] env[68233]: value = "task-2783226" [ 1169.612626] env[68233]: _type = "Task" [ 1169.612626] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.612831] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2434736f-ba39-49dc-9b3c-5ce3afd0a8c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.626289] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783226, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.627463] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1169.627463] env[68233]: value = "task-2783227" [ 1169.627463] env[68233]: _type = "Task" [ 1169.627463] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.635731] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783227, 'name': Rename_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.662017] env[68233]: DEBUG oslo_concurrency.lockutils [req-702477ce-b3ff-4276-8de1-ac23cff74c5f req-bf1d9a68-d7c6-445f-ba92-c416dca76e1e service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.709077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ec88b831-a31c-41a6-a545-26c06fa6e81f tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.065s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.741532] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 1207585c-fb2a-43b7-aec2-c3a7889255a5] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1169.756502] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ae15053b-5f78-4f0b-879f-b616e66ad714 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.598s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.996595] env[68233]: DEBUG oslo_vmware.api [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783225, 'name': PowerOnVM_Task, 'duration_secs': 0.475245} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.996757] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1169.996924] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-facfc299-67ed-440e-af63-0059f262affd tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance '5038002c-884f-4f75-a1fe-aa84220c9ea6' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1170.124470] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783226, 'name': ReconfigVM_Task, 'duration_secs': 0.274199} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.125434] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Reconfigured VM instance instance-00000073 to attach disk [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b/151b16bc-6b78-4527-8571-b07b5ad7db7b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1170.125434] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1170.136587] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783227, 'name': Rename_Task, 'duration_secs': 0.14817} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.136841] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1170.137086] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5134b9dc-8659-4e99-a9d9-a827c68f3d72 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.143112] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1170.143112] env[68233]: value = "task-2783228" [ 1170.143112] env[68233]: _type = "Task" [ 1170.143112] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.150049] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783228, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.246076] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 903f0919-b321-4d74-9ea2-bc9771184ded] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1170.334722] env[68233]: DEBUG nova.compute.manager [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Received event network-changed-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1170.334722] env[68233]: DEBUG nova.compute.manager [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Refreshing instance network info cache due to event network-changed-85aae8a7-b4dc-4227-8b64-2e08fa7fa580. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1170.334907] env[68233]: DEBUG oslo_concurrency.lockutils [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] Acquiring lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1170.335365] env[68233]: DEBUG oslo_concurrency.lockutils [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] Acquired lock "refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1170.335365] env[68233]: DEBUG nova.network.neutron [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Refreshing network info cache for port 85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1170.631801] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20e611a-a5bc-4c13-b70a-9340540f54f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.653840] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-771d28d8-b058-4bb7-ba0b-3e5ae0e090de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.662784] env[68233]: DEBUG oslo_vmware.api [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783228, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.677061] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1170.750136] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 0f813d55-2737-44ae-b62d-3321e77dfdab] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1171.054915] env[68233]: DEBUG nova.network.neutron [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updated VIF entry in instance network info cache for port 85aae8a7-b4dc-4227-8b64-2e08fa7fa580. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1171.055317] env[68233]: DEBUG nova.network.neutron [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating instance_info_cache with network_info: [{"id": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "address": "fa:16:3e:40:fa:ec", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85aae8a7-b4", "ovs_interfaceid": "85aae8a7-b4dc-4227-8b64-2e08fa7fa580", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1171.063738] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d02a30-ae7d-47da-bc0f-955094fb6159 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.083313] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1171.160037] env[68233]: DEBUG oslo_vmware.api [None 
req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783228, 'name': PowerOnVM_Task, 'duration_secs': 0.695448} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.160376] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1171.160620] env[68233]: INFO nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Took 8.26 seconds to spawn the instance on the hypervisor. [ 1171.160830] env[68233]: DEBUG nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1171.161669] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb141b4-290b-46ba-8a32-4e04d65016bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.249661] env[68233]: DEBUG nova.network.neutron [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Port 3f0ccb34-9d4b-457b-8eb8-4110c1b41180 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1171.252758] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: dca145c8-ed95-4dfb-9534-37035c75dafb] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1171.379093] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.379402] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.558149] env[68233]: DEBUG oslo_concurrency.lockutils [req-d7dd4feb-aeda-48aa-8c87-8ceb89812bb8 req-495c157e-3a4f-4c8a-b0ca-b363bc808f60 service nova] Releasing lock 
"refresh_cache-aadc7dbe-456c-4bf3-b26d-bac672459fb9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.588924] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.589259] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb65b301-b0e1-4cb6-87c3-3c477b5c7eb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.597021] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1171.597021] env[68233]: value = "task-2783229" [ 1171.597021] env[68233]: _type = "Task" [ 1171.597021] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.605644] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783229, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.680838] env[68233]: INFO nova.compute.manager [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Took 21.30 seconds to build instance. 
[ 1171.756224] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c5c8bf0c-eb58-41bc-a316-b4ac78187658] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1171.884486] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1171.884687] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1171.885662] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efeeb69-8cb7-4860-a64c-969326c2ff3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.903136] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9520ac99-31c1-40ef-a9d5-55c966c57645 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.929078] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfiguring VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1171.929747] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-456c5d7c-9398-48da-90df-4e417c6124c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.948172] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1171.948172] env[68233]: value = "task-2783230" [ 1171.948172] env[68233]: _type = "Task" [ 1171.948172] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.948704] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.948927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.949123] env[68233]: DEBUG nova.compute.manager [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Going to confirm migration 5 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1171.960073] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.107564] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783229, 'name': PowerOffVM_Task, 'duration_secs': 0.225042} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.107825] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1172.108012] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1172.183417] env[68233]: DEBUG oslo_concurrency.lockutils [None req-754d5de5-f5a7-4a3c-a9b9-416ad3f61c0d tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.815s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.268022] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3cca16e1-3363-4026-9359-4ed2ba41e25d] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1172.276925] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.277235] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1172.277448] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.405145] env[68233]: DEBUG nova.compute.manager [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Received event network-changed-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1172.405145] env[68233]: DEBUG nova.compute.manager [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] [instance: 
81e0800d-7731-433c-9238-b4aa07a4ddda] Refreshing instance network info cache due to event network-changed-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1172.405145] env[68233]: DEBUG oslo_concurrency.lockutils [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] Acquiring lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.405145] env[68233]: DEBUG oslo_concurrency.lockutils [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] Acquired lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.405145] env[68233]: DEBUG nova.network.neutron [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Refreshing network info cache for port f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.461123] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 
tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1172.615078] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1172.619057] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1172.619057] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1172.619057] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1172.619057] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1172.622554] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62b891bf-35eb-421f-9abf-91466aa4def5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.643053] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1172.643053] env[68233]: value = "task-2783231" [ 1172.643053] env[68233]: _type = "Task" [ 1172.643053] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.649913] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783231, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.655714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.655714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.655714] env[68233]: DEBUG nova.network.neutron [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.655714] env[68233]: DEBUG nova.objects.instance [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'info_cache' on Instance uuid 5038002c-884f-4f75-a1fe-aa84220c9ea6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1172.774134] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 4677d047-f8dc-4501-be9b-14e6a2222f46] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1172.958591] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.121482] env[68233]: DEBUG nova.network.neutron [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updated VIF entry in instance network info cache for port f5288ad5-2e52-4994-bfa9-ff2e77f3fe10. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.121844] env[68233]: DEBUG nova.network.neutron [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updating instance_info_cache with network_info: [{"id": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "address": "fa:16:3e:52:2e:6f", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5288ad5-2e", "ovs_interfaceid": "f5288ad5-2e52-4994-bfa9-ff2e77f3fe10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.150274] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783231, 'name': ReconfigVM_Task, 'duration_secs': 0.374275} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.150578] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.278907] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: d0d6eed0-db5b-4371-8f03-b3415fd833f0] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1173.321369] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1173.321599] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1173.321786] env[68233]: DEBUG nova.network.neutron [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1173.459356] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.624752] env[68233]: DEBUG oslo_concurrency.lockutils [req-336745ba-614e-44ed-b142-a767aefb584c req-9ed31c8f-f84f-4d76-a94b-50fad7b61335 service nova] Releasing lock "refresh_cache-81e0800d-7731-433c-9238-b4aa07a4ddda" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.657179] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1173.657470] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1173.657676] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1173.657923] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1173.658132] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1173.658335] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1173.658637] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1173.658831] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1173.659062] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1173.659282] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1173.659515] env[68233]: DEBUG nova.virt.hardware [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1173.667541] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1173.670627] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7f281f6-af30-4e70-b145-a73a5dc6c7ad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.696395] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1173.696395] env[68233]: value = "task-2783233" [ 1173.696395] env[68233]: _type = "Task" [ 1173.696395] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.706862] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783233, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.781516] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 22c06baf-6316-4531-8037-b8b77c401596] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1173.959651] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.055707] env[68233]: DEBUG nova.network.neutron [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [{"id": "922f97ce-4e0e-42e9-b56b-1e312580276a", "address": "fa:16:3e:a5:03:5b", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap922f97ce-4e", "ovs_interfaceid": "922f97ce-4e0e-42e9-b56b-1e312580276a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.137675] env[68233]: DEBUG nova.network.neutron [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.205709] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783233, 'name': ReconfigVM_Task, 'duration_secs': 0.400955} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.205921] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1174.206798] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60015094-92ce-478a-975e-d47e335bf2f0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.228763] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1174.229016] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1231589-3f90-4f5b-9b84-7de9a0b29889 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.246627] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1174.246627] env[68233]: value = "task-2783234" [ 1174.246627] env[68233]: _type = "Task" [ 1174.246627] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.253765] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.285366] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3d759f4f-3845-4bb5-8cfa-639b7023bb27] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1174.459739] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.558982] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-5038002c-884f-4f75-a1fe-aa84220c9ea6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.559256] env[68233]: DEBUG nova.objects.instance [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'migration_context' on Instance uuid 5038002c-884f-4f75-a1fe-aa84220c9ea6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.640569] env[68233]: DEBUG oslo_concurrency.lockutils [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.757573] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783234, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.788206] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: bb59f959-4cf8-4244-b7b4-6bf630a616b3] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1174.959600] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.063018] env[68233]: DEBUG nova.objects.base [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Object Instance<5038002c-884f-4f75-a1fe-aa84220c9ea6> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1175.064024] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f103e5f-2bd9-403a-a2bd-d336853485d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.083020] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35808728-fe2f-46b6-b021-21758adac702 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.088126] env[68233]: DEBUG oslo_vmware.api [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1175.088126] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cca2c7-4755-64e0-6a3f-15103832126b" [ 1175.088126] env[68233]: _type = "Task" [ 1175.088126] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.095303] env[68233]: DEBUG oslo_vmware.api [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cca2c7-4755-64e0-6a3f-15103832126b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.166818] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1670f6-b781-40f5-aee7-eba8e7d6b1b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.185356] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73a8686-1966-4c00-aa0b-5c5b5644b906 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.191838] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.257106] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783234, 'name': ReconfigVM_Task, 'duration_secs': 0.701526} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.257391] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1175.257670] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.290991] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 2c219b8c-813d-4155-af3b-327a7ebd75fc] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1175.460307] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.598821] env[68233]: DEBUG oslo_vmware.api [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52cca2c7-4755-64e0-6a3f-15103832126b, 'name': SearchDatastore_Task, 'duration_secs': 0.006478} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.599128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1175.599386] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.698619] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.698924] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43d964d1-7783-4c93-8bc3-0644072ecc56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.706608] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1175.706608] env[68233]: value = "task-2783235" [ 1175.706608] env[68233]: _type = "Task" [ 1175.706608] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.714572] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783235, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.763728] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d19104f-3887-48a3-98df-2d917d808769 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.782612] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7dc77d-020d-4ea3-8e2b-8f718f5d8199 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.799627] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c5b42243-878f-4150-a5d3-63d69e474bd1] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1175.801431] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1175.960947] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.218787] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783235, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.299264] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b77b772-4420-44fa-941a-b3985c376230 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.307396] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 8880bb83-56f1-4ad2-9d6d-1885826aed21] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1176.315880] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fa0b81-1427-4661-896e-c7b284fd05f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.354231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abae42b9-268f-44ff-8f04-e9803ae611ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.361989] env[68233]: DEBUG nova.network.neutron [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Port 4ae388e9-417d-4206-9e31-b91986ba0652 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1176.364207] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d021627-71bd-4470-b530-04415de1f728 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.380366] env[68233]: DEBUG nova.compute.provider_tree [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.463659] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.716714] env[68233]: DEBUG oslo_vmware.api [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783235, 'name': PowerOnVM_Task, 'duration_secs': 0.641604} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.716985] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1176.717217] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-4be04030-b194-4e4a-b737-6495c0d16d21 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance '151b16bc-6b78-4527-8571-b07b5ad7db7b' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1176.815732] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 03688e90-5433-47ca-baaa-75861ad093b7] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1176.884080] env[68233]: DEBUG nova.scheduler.client.report [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.962068] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.319569] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 4cd6a904-ab49-4cfb-bf57-2d6c46c7ae98] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1177.389522] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.389755] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.389929] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.464741] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.823074] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 3d94d1b6-ba04-407d-9398-d4f7b21a7ee1] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1177.896517] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.297s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.963810] env[68233]: DEBUG oslo_vmware.api [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783230, 'name': ReconfigVM_Task, 'duration_secs': 5.885592} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.964071] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.964320] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Reconfigured VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1178.326096] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: da2a5acb-0861-4225-a6b4-324482c480ea] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1178.442540] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.442540] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.442540] env[68233]: DEBUG nova.network.neutron [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1178.461935] env[68233]: INFO nova.scheduler.client.report [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocation for migration 91749297-59a7-44a7-b90c-8e7f6539e7c9 [ 1178.808058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.808323] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.808516] env[68233]: DEBUG nova.compute.manager [None req-91bcdfce-4ada-473b-877f-823360eae531 
tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Going to confirm migration 6 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1178.830500] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 16f20fab-ccf8-4a47-ae7d-9ab55932c5c9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1178.969746] env[68233]: DEBUG oslo_concurrency.lockutils [None req-651e7a73-6a00-4926-a96f-bcf44ce23f4d tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.020s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.247274] env[68233]: DEBUG nova.network.neutron [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1179.286214] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.286329] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.286898] env[68233]: DEBUG nova.network.neutron [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 
tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.332610] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 07c7d125-d689-4499-aa4a-b9d3441c6fd0] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1179.337250] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.337546] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.337768] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.337959] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.339700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.340398] env[68233]: INFO nova.compute.manager [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Terminating instance [ 1179.346988] env[68233]: DEBUG nova.compute.manager [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1179.346988] env[68233]: 
DEBUG nova.compute.manager [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing instance network info cache due to event network-changed-1d10db7a-f783-4b60-b20a-834d68367b3c. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1179.346988] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] Acquiring lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.353028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.353028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquired lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.353028] env[68233]: DEBUG nova.network.neutron [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.353028] env[68233]: DEBUG nova.objects.instance [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'info_cache' on Instance uuid 151b16bc-6b78-4527-8571-b07b5ad7db7b {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.750485] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.841130] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 64b8997c-3246-4c97-a6c9-3a6a23645d38] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1179.843318] env[68233]: DEBUG nova.compute.manager [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1179.844028] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1179.844709] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3815545-899c-4310-a150-3aa526f6c788 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.853549] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1179.853844] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f91b36d7-dbbc-429c-b46e-4b6e84b2af64 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.864792] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1179.864792] env[68233]: value = "task-2783238" [ 1179.864792] env[68233]: _type = "Task" [ 1179.864792] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.873653] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783238, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.907110] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "ffc57efd-d031-4b09-8255-2498f01e8c78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.907370] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.991873] env[68233]: INFO nova.network.neutron [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1179.992227] env[68233]: DEBUG nova.network.neutron [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.031581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.031879] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.032346] env[68233]: DEBUG nova.objects.instance [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'flavor' on Instance uuid 171da032-9aeb-4972-8ec7-4181e2667ac0 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.275551] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c55c01c-05e3-4432-91ae-1d2a397b6594 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.307160] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cfe481-602d-4f6a-ab97-9625756468f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.318082] env[68233]: DEBUG nova.virt.vmwareapi.vmops 
[None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1180.345169] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 28af332b-4f9b-4474-afdc-ab17e92df6e7] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1180.375066] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783238, 'name': PowerOffVM_Task, 'duration_secs': 0.230194} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.376014] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1180.376227] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1180.376461] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ecb6550-f57b-4932-a93f-e7057bf8f9bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.410418] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Starting instance... 
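The "Acquiring lock ... by ..." / "Lock ... acquired ... waited 0.001s" entries above (and the later "released ... held N.NNNs" lines) are emitted by oslo.concurrency's lockutils wrapper that Nova places around critical sections such as interface attach/detach and resource-tracker claims. A minimal sketch of that pattern, with placeholder lock names and bodies:

    from oslo_concurrency import lockutils

    # Decorator form: serializes attach/detach operations for one
    # instance+port combination, matching the "interface-<uuid>-<port>" lock
    # names in the log. The body is a placeholder.
    @lockutils.synchronized('interface-<instance_uuid>-<port_id>')
    def do_attach_interface():
        pass  # attach logic runs while the lock is held

    # Context-manager form, as used around resource-tracker claims; lockutils
    # itself logs the "acquired ... waited" / "released ... held" timings.
    with lockutils.lock('compute_resources'):
        pass  # claim/usage update runs while the lock is held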
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1180.446361] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1180.446580] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1180.446804] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] 5038002c-884f-4f75-a1fe-aa84220c9ea6 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1180.447110] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-05e801c9-a90d-4e8f-b342-19e5a9afe7f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.455683] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1180.455683] env[68233]: value = "task-2783241" [ 1180.455683] env[68233]: _type = "Task" [ 1180.455683] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.468169] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
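The "Waiting for the task: ... task-2783241 ... to complete" and "progress is N%" entries around the datastore delete and power operations come from oslo.vmware's task-polling helper. A rough sketch of how a driver invokes a vCenter task and blocks on it (not necessarily Nova's exact call path; `session` is an oslo_vmware.api.VMwareAPISession and `vm_ref` a placeholder managed-object reference):

    def power_on_and_wait(session, vm_ref):
        """Invoke PowerOnVM_Task on vm_ref and block until vCenter finishes it.

        invoke_api() issues the SOAP call and returns a Task managed object;
        wait_for_task() polls it (the "progress is N%" lines above) until it
        reaches 'success' or raises on error/cancellation.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)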
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.495191] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1180.497991] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] Acquired lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1180.502022] env[68233]: DEBUG nova.network.neutron [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Refreshing network info cache for port 1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1180.625206] env[68233]: DEBUG nova.network.neutron [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [{"id": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "address": "fa:16:3e:32:28:bf", "network": {"id": "48fb8714-0340-4604-b312-0df0be86fb5b", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1050313892-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d32ae322ad5641b4bebd1aa390b5914f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f0ccb34-9d", "ovs_interfaceid": "3f0ccb34-9d4b-457b-8eb8-4110c1b41180", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.674643] env[68233]: DEBUG nova.objects.instance [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'pci_requests' on Instance uuid 171da032-9aeb-4972-8ec7-4181e2667ac0 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.826135] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.826556] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44c7bffc-8cde-4874-94e6-cb81017bca97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.834424] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1180.834424] env[68233]: value = "task-2783242" [ 1180.834424] env[68233]: _type = "Task" [ 1180.834424] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.842650] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783242, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.849032] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 48270554-abe4-4f72-b8b9-5f2de6a9ed26] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1180.932495] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.932817] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.934693] env[68233]: INFO nova.compute.claims [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1180.965789] env[68233]: DEBUG oslo_vmware.api [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220602} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.966095] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1180.966255] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1180.966433] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1180.966609] env[68233]: INFO nova.compute.manager [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1180.966890] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
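The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return" entry above appears to come from oslo.service's looping-call/retry machinery, which Nova uses so transient failures during network deallocation are retried rather than failing the teardown. A minimal, hypothetical use of the same decorator (retry count, sleeps, exception tuple and body are placeholders):

    from oslo_service import loopingcall

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=1,
                                max_sleep_time=10, exceptions=(ConnectionError,))
    def flaky_cleanup():
        pass  # a ConnectionError raised here would trigger a delayed retry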
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1180.967102] env[68233]: DEBUG nova.compute.manager [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1180.967198] env[68233]: DEBUG nova.network.neutron [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1181.003637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b9fb28ca-ce5d-4a67-b10a-1e4732b00fd1 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.624s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.128771] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Releasing lock "refresh_cache-151b16bc-6b78-4527-8571-b07b5ad7db7b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.129106] env[68233]: DEBUG nova.objects.instance [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lazy-loading 'migration_context' on Instance uuid 151b16bc-6b78-4527-8571-b07b5ad7db7b {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.177434] env[68233]: DEBUG nova.objects.base [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Object Instance<171da032-9aeb-4972-8ec7-4181e2667ac0> lazy-loaded attributes: flavor,pci_requests {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1181.177674] env[68233]: DEBUG nova.network.neutron [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.284713] env[68233]: DEBUG nova.policy [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f1080de33eba4a2da2aed8828e60fab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74638e02258142a1a5170178faabb0ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1181.331857] env[68233]: DEBUG nova.network.neutron [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updated VIF entry in instance network info cache for port 
1d10db7a-f783-4b60-b20a-834d68367b3c. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1181.332344] env[68233]: DEBUG nova.network.neutron [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [{"id": "1d10db7a-f783-4b60-b20a-834d68367b3c", "address": "fa:16:3e:83:8c:f4", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d10db7a-f7", "ovs_interfaceid": "1d10db7a-f783-4b60-b20a-834d68367b3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1181.345382] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783242, 'name': PowerOnVM_Task} progress is 66%. 
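The instance_info_cache payloads above are lists of VIF dictionaries (port id, MAC, network, subnets, binding details). A small plain-dict helper, purely illustrative, that pulls out the fields most often needed when reading these cache updates:

    def summarize_network_info(network_info):
        """Return (port_id, mac, fixed_ips) tuples from a cached network_info list."""
        rows = []
        for vif in network_info:
            fixed_ips = [ip['address']
                         for subnet in vif['network']['subnets']
                         for ip in subnet['ips']]
            rows.append((vif['id'], vif['address'], fixed_ips))
        return rows

    # For the cache update above this yields:
    # [('1d10db7a-f783-4b60-b20a-834d68367b3c', 'fa:16:3e:83:8c:f4', ['192.168.128.11'])]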
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.352667] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 990e1a66-f2ab-4925-b1da-58cdc41a6315] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1181.632060] env[68233]: DEBUG nova.objects.base [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Object Instance<151b16bc-6b78-4527-8571-b07b5ad7db7b> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1181.633076] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255f7978-4d3f-43f2-9020-3de738697a76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.654250] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04779cc1-e246-42c1-9b33-d87f7740b23c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.662869] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1181.662869] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110e35-a1ee-a706-3b5b-20b1632002cd" [ 1181.662869] env[68233]: _type = "Task" [ 1181.662869] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.671275] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110e35-a1ee-a706-3b5b-20b1632002cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.839401] env[68233]: DEBUG oslo_concurrency.lockutils [req-bd47a3f4-b0f6-4fce-832c-c936e828d1bd req-12060489-b10f-4c0f-93d0-1bf083e6255f service nova] Releasing lock "refresh_cache-03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.844283] env[68233]: DEBUG oslo_vmware.api [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783242, 'name': PowerOnVM_Task, 'duration_secs': 0.832438} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.844611] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.844791] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a288d013-3d11-4416-85b9-db5098ae4a79 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance '9f862347-508b-4c8a-a338-97972b0c0b0b' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1181.855397] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 72467d49-6fa8-42db-871e-4e50e77eedf7] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1181.865974] env[68233]: DEBUG nova.network.neutron [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.091251] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e3ce1b-4e25-46ca-8e33-bc58e6523494 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.098565] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60cb20d-2e1c-483e-9441-ed2375e7fdba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.129150] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb973d76-5e07-410a-946b-118f3c942d55 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.135604] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8d2eb8-215b-4a64-a580-12cb33f2826a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.148183] env[68233]: DEBUG nova.compute.provider_tree [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1182.171272] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52110e35-a1ee-a706-3b5b-20b1632002cd, 'name': SearchDatastore_Task, 'duration_secs': 0.013986} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.171523] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.358291] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 13972b73-8bae-4a2a-a987-b6177381e7c8] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1182.369529] env[68233]: INFO nova.compute.manager [-] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Took 1.40 seconds to deallocate network for instance. [ 1182.650893] env[68233]: DEBUG nova.scheduler.client.report [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.861586] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: abdf9de2-8563-4a31-91a3-0c18b0387533] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1182.876607] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1182.887156] env[68233]: DEBUG nova.network.neutron [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Successfully updated port: 9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1183.155746] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.223s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.156257] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Start building networks asynchronously for 
instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1183.158750] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.987s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.365156] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 876d428d-d5c9-422a-aba2-2d6c61b092db] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1183.388839] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.389036] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.389239] env[68233]: DEBUG nova.network.neutron [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.662721] env[68233]: DEBUG nova.compute.utils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1183.666847] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Allocating IP information in the background. 
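The scheduler report entries a little earlier ("Inventory has not changed for provider 51aa13e7-... based on inventory data: ...") carry the placement inventory that the claim on node domain-c8 was made against. Effective schedulable capacity per resource class is (total - reserved) * allocation_ratio; checking that arithmetic against the logged values:

    # Values copied from the inventory data logged above
    # (min_unit/max_unit/step_size omitted).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0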
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1183.667033] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1183.728168] env[68233]: DEBUG nova.policy [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65225f2affe34ceda9a265989bddfc9f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74a353ea173c4b8bb74b84032d4e12b0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1183.841045] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71da491-20ac-4dcf-8dca-e4c09bf74e1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.850176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484b6863-b548-4477-b7f0-6a742d76745d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.879942] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: f7a1bfc5-7141-4764-b3fe-08d06020209a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1183.882955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f084162-b942-4739-8e06-adbd9bafc16c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.891312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-945664e6-07b8-49fa-a1e9-09616f84147c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.905994] env[68233]: DEBUG nova.compute.provider_tree [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1183.936471] env[68233]: WARNING nova.network.neutron [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] 5065c922-1b9f-4d7f-8615-b5619dd4fc68 already exists in list: networks containing: ['5065c922-1b9f-4d7f-8615-b5619dd4fc68']. 
ignoring it [ 1184.001995] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Successfully created port: 7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1184.167661] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1184.228565] env[68233]: DEBUG nova.network.neutron [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "address": "fa:16:3e:90:68:d0", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3396e5-37", "ovs_interfaceid": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.383293] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: f2af60e6-496c-4edb-9e99-4b45fa94bfeb] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1184.410094] env[68233]: DEBUG nova.scheduler.client.report [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.731355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.731990] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.732166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.733038] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d065de1-955f-4009-bdc4-0042291d0f43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.750092] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1184.750323] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 
tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1184.750480] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1184.750823] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1184.750823] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1184.750936] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1184.751146] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1184.751304] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1184.751484] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1184.751702] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1184.751904] env[68233]: DEBUG nova.virt.hardware [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1184.758351] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfiguring VM to attach interface 
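The nova.virt.hardware entries above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every sockets/cores/threads split of the flavor's vCPU count that fits the flavor and image limits. A simplified, stand-alone rendering of that enumeration (no NUMA handling or preference ordering, unlike Nova's real code):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log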
{{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1184.758351] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-805d9ccc-eaad-4edb-a995-f82c967f8725 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.774642] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1184.774642] env[68233]: value = "task-2783244" [ 1184.774642] env[68233]: _type = "Task" [ 1184.774642] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.782612] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783244, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.888021] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 6ceb7d2d-143a-464a-aca5-6b6838630bb8] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1185.177802] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1185.205145] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1185.205473] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1185.205646] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1185.205830] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1185.205977] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1185.206137] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1185.206344] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1185.206527] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1185.206731] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1185.206927] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1185.207113] env[68233]: DEBUG nova.virt.hardware [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1185.207955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d25980-26e2-43df-a6bc-5f304deefe4f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.216617] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da694f9f-5af3-44f1-8e31-a8ac9b163134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.284887] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 
tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783244, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.391885] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c6a358b7-0e6a-43bb-a171-5e6175f947bd] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1185.419652] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.261s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.422588] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.546s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.422803] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.446155] env[68233]: INFO nova.scheduler.client.report [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocations for instance 5038002c-884f-4f75-a1fe-aa84220c9ea6 [ 1185.468783] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Successfully updated port: 7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1185.785250] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783244, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.894565] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 2812bf7c-5117-4fd9-9330-0cc94277bf5d] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1185.955497] env[68233]: DEBUG oslo_concurrency.lockutils [None req-bafe12c3-49b9-457f-857f-93225727a01e tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "5038002c-884f-4f75-a1fe-aa84220c9ea6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.618s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.971136] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.971312] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.971496] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.974928] env[68233]: INFO nova.scheduler.client.report [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocation for migration 6aeb657c-4d3c-4cd3-9a45-8b4449043aa4 [ 1186.287295] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783244, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.399065] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 11ec9800-fa7e-4dbd-bdc1-63d0b496589f] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1186.479886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.671s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1186.504902] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1186.638354] env[68233]: DEBUG nova.network.neutron [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Updating instance_info_cache with network_info: [{"id": "7975654b-2d0d-4348-8291-fd80199f2558", "address": "fa:16:3e:cd:68:07", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975654b-2d", "ovs_interfaceid": "7975654b-2d0d-4348-8291-fd80199f2558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.786794] env[68233]: DEBUG oslo_vmware.api [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783244, 'name': ReconfigVM_Task, 'duration_secs': 1.631003} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.860220] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.860467] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfigured VM to attach interface {{(pid=68233) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1186.901473] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 87385201-3118-4a8e-9739-db3b431566c5] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1187.140947] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.141236] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Instance network_info: |[{"id": "7975654b-2d0d-4348-8291-fd80199f2558", "address": "fa:16:3e:cd:68:07", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975654b-2d", "ovs_interfaceid": "7975654b-2d0d-4348-8291-fd80199f2558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1187.141687] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:68:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '7975654b-2d0d-4348-8291-fd80199f2558', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1187.149307] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1187.149501] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1187.149749] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f01bb49-31e1-47cc-83c2-669690649fca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.168876] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1187.168876] env[68233]: value = "task-2783246" [ 1187.168876] env[68233]: _type = "Task" [ 1187.168876] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.176444] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783246, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.364909] env[68233]: DEBUG oslo_concurrency.lockutils [None req-83a33eda-cec8-400c-9c10-c499b115de90 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.333s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.404609] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 4a388705-7e00-45dc-8891-c6e587b1cdb8] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1187.647945] env[68233]: DEBUG nova.compute.manager [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1187.648195] env[68233]: DEBUG nova.compute.manager [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing instance network info cache due to event network-changed-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1187.648376] env[68233]: DEBUG oslo_concurrency.lockutils [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.648516] env[68233]: DEBUG oslo_concurrency.lockutils [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.648677] env[68233]: DEBUG nova.network.neutron [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1187.679757] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783246, 'name': CreateVM_Task, 'duration_secs': 0.298382} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.679955] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1187.680701] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1187.680866] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1187.681203] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1187.681504] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a18b5a-a9eb-4766-bc92-9b973457116b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.686845] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1187.686845] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240472c-3174-1dd5-cd47-bfc1765be33b" [ 1187.686845] env[68233]: _type = "Task" [ 1187.686845] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.692011] env[68233]: DEBUG nova.compute.manager [req-aaf1fc93-ad67-45f5-8d0f-fa01d936558a req-c7e61d2b-5428-407f-a358-81a8ac932da3 service nova] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Received event network-vif-deleted-922f97ce-4e0e-42e9-b56b-1e312580276a {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1187.697432] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240472c-3174-1dd5-cd47-bfc1765be33b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.831433] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.831677] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.831883] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.832074] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1187.832245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.834357] env[68233]: INFO nova.compute.manager [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Terminating instance [ 1187.907742] env[68233]: DEBUG nova.compute.manager [None 
req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 2f797cb5-19d8-4b4d-a78d-ace0a3d1f37f] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1188.196683] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240472c-3174-1dd5-cd47-bfc1765be33b, 'name': SearchDatastore_Task, 'duration_secs': 0.010624} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.197111] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1188.197355] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1188.197585] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.197730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.197910] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1188.198191] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5528848e-03ca-471c-9c04-c2c8efef1b15 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.206562] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1188.206758] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 
tempest-ServerDiskConfigTestJSON-1552432801-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1188.207471] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d61c4349-3454-43c0-928d-c6e90c792b19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.213527] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1188.213527] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5246b88e-45ec-5acd-b61a-b97883e33016" [ 1188.213527] env[68233]: _type = "Task" [ 1188.213527] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.224661] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5246b88e-45ec-5acd-b61a-b97883e33016, 'name': SearchDatastore_Task, 'duration_secs': 0.008846} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.225319] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64c0b1ec-dc09-44bf-aa02-a7f0751f8133 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.230495] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1188.230495] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52463b2f-a722-7a20-3593-693b830f3b7f" [ 1188.230495] env[68233]: _type = "Task" [ 1188.230495] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.238638] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52463b2f-a722-7a20-3593-693b830f3b7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.337408] env[68233]: DEBUG nova.compute.manager [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1188.337632] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1188.338534] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d82123-ce13-4a00-b826-be1bba300bfe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.346519] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1188.346742] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83eff5ff-53ba-4b9b-a300-9f40cfee00a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.352695] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1188.352695] env[68233]: value = "task-2783247" [ 1188.352695] env[68233]: _type = "Task" [ 1188.352695] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.359745] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783247, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.369801] env[68233]: DEBUG nova.network.neutron [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updated VIF entry in instance network info cache for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1188.370258] env[68233]: DEBUG nova.network.neutron [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "address": "fa:16:3e:90:68:d0", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3396e5-37", "ovs_interfaceid": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.472866] env[68233]: DEBUG nova.network.neutron [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Port 4ae388e9-417d-4206-9e31-b91986ba0652 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1188.473221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock 
"refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.473420] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.473599] env[68233]: DEBUG nova.network.neutron [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.477869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.478148] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.704065] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.704365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.431395] env[68233]: DEBUG oslo_concurrency.lockutils [req-3a24a8dc-d0d0-4709-aab2-7dd660b16410 req-54054319-5fae-4c7b-86fc-c18868950cf6 service nova] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.433905] env[68233]: INFO nova.compute.manager [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Detaching volume ccfad3f4-1e81-405b-8396-09efc21a5cb3 [ 1189.435574] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b 
tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.435733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.440622] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc60ef12-9ddb-42be-bf2b-6e0780c53630 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.449374] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52463b2f-a722-7a20-3593-693b830f3b7f, 'name': SearchDatastore_Task, 'duration_secs': 0.008496} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.449625] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.449869] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ffc57efd-d031-4b09-8255-2498f01e8c78/ffc57efd-d031-4b09-8255-2498f01e8c78.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1189.450161] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07d29014-a2b8-43ee-8624-f6446dc0c675 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.465554] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783247, 'name': PowerOffVM_Task, 'duration_secs': 0.188045} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.466551] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf6d7f6-0348-4447-b69e-873ca136928a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.468796] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.468963] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1189.469191] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c1d01cc2-4ec4-45c3-9c1c-3ddf1e751872 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.471706] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1189.471706] env[68233]: value = "task-2783249" [ 1189.471706] env[68233]: _type = "Task" [ 1189.471706] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.475825] env[68233]: INFO nova.virt.block_device [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Attempting to driver detach volume ccfad3f4-1e81-405b-8396-09efc21a5cb3 from mountpoint /dev/sdb [ 1189.476029] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1189.476212] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559525', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'name': 'volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2d04b37-3eae-46cb-a227-b62d36c62a6a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'serial': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1189.492149] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e42e89-f60a-480d-8baa-1e0be6dde7f2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.500139] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfiguring VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1189.502872] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79019ff5-0529-4aba-8674-dc79e8803ff2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.519827] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.540495] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69d0f21-2b12-46c1-b915-9280f9422d1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.543208] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1189.543393] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1189.543569] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleting the datastore file [datastore2] 151b16bc-6b78-4527-8571-b07b5ad7db7b {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1189.543861] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1189.543861] env[68233]: value = "task-2783251" [ 1189.543861] env[68233]: _type = "Task" [ 1189.543861] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.544057] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c5bbf7b-6c3e-4395-a22c-39f3216d304c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.551350] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8676ac8c-4c85-4c0d-99b7-43711d57f328 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.557225] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for the task: (returnval){ [ 1189.557225] env[68233]: value = "task-2783252" [ 1189.557225] env[68233]: _type = "Task" [ 1189.557225] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.557422] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.579759] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3c403c-a455-47a4-8210-1a92c4e01cb2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.584943] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783252, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.597212] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] The volume has not been displaced from its original location: [datastore2] volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3/volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1189.602192] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1189.602484] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ed46eb0-dd13-411c-aa70-2c5291210543 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.621873] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1189.621873] env[68233]: value = "task-2783253" [ 1189.621873] env[68233]: _type = "Task" [ 1189.621873] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.629852] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783253, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.690475] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1189.690733] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.691143] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.691143] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.691226] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] No waiting events found dispatching network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1189.691383] env[68233]: WARNING nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received unexpected event network-vif-plugged-9b3396e5-37ec-49f5-9da5-1c9cc423a97e for instance with vm_state active and task_state None. [ 1189.691589] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-changed-9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1189.691684] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing instance network info cache due to event network-changed-9b3396e5-37ec-49f5-9da5-1c9cc423a97e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1189.691861] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.691990] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.692155] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Refreshing network info cache for port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.739632] env[68233]: DEBUG nova.network.neutron [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.981548] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462093} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.981855] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] ffc57efd-d031-4b09-8255-2498f01e8c78/ffc57efd-d031-4b09-8255-2498f01e8c78.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1189.982092] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1189.982353] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16c7fec3-f834-45e2-9ea0-7724a2dea3e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.987988] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1189.987988] env[68233]: value = "task-2783254" [ 1189.987988] env[68233]: _type = "Task" [ 1189.987988] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.996639] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783254, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.055426] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.069029] env[68233]: DEBUG oslo_vmware.api [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Task: {'id': task-2783252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.441901} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.069029] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.069029] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1190.069029] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1190.069029] env[68233]: INFO nova.compute.manager [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1190.069029] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.069029] env[68233]: DEBUG nova.compute.manager [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1190.069029] env[68233]: DEBUG nova.network.neutron [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1190.130636] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783253, 'name': ReconfigVM_Task, 'duration_secs': 0.472965} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.130915] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1190.135380] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4c4a26b-94aa-482e-ab4b-81c945515827 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.149330] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1190.149330] env[68233]: value = "task-2783255" [ 1190.149330] env[68233]: _type = "Task" [ 1190.149330] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.158011] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783255, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.242958] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.412600] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updated VIF entry in instance network info cache for port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1190.413105] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "address": "fa:16:3e:90:68:d0", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9b3396e5-37", "ovs_interfaceid": "9b3396e5-37ec-49f5-9da5-1c9cc423a97e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.496974] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783254, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05896} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.497314] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1190.497966] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa839c5-e5c9-4776-93bf-269ddbc84728 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.518693] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] ffc57efd-d031-4b09-8255-2498f01e8c78/ffc57efd-d031-4b09-8255-2498f01e8c78.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1190.518906] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3eff4415-3818-449c-8dfa-530e4929f5a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.537861] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1190.537861] env[68233]: value = "task-2783256" [ 1190.537861] env[68233]: _type = "Task" [ 1190.537861] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.547828] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783256, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.556525] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.659069] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783255, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.746460] env[68233]: DEBUG nova.compute.manager [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68233) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1190.746707] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.747020] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.818814] env[68233]: DEBUG nova.network.neutron [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.915918] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.916196] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Received event network-vif-plugged-7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1190.916400] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquiring lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.916603] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.916777] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.916952] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] No waiting events found dispatching network-vif-plugged-7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1190.917138] env[68233]: WARNING nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Received unexpected event network-vif-plugged-7975654b-2d0d-4348-8291-fd80199f2558 for instance with vm_state building and task_state spawning. [ 1190.917300] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Received event network-changed-7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1190.917453] env[68233]: DEBUG nova.compute.manager [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Refreshing instance network info cache due to event network-changed-7975654b-2d0d-4348-8291-fd80199f2558. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1190.917663] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquiring lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.917824] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Acquired lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.917981] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Refreshing network info cache for port 7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.048483] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783256, 'name': ReconfigVM_Task, 'duration_secs': 0.27016} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.051516] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Reconfigured VM instance instance-00000077 to attach disk [datastore2] ffc57efd-d031-4b09-8255-2498f01e8c78/ffc57efd-d031-4b09-8255-2498f01e8c78.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1191.052200] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-207b73f0-3925-4018-9ba0-eec180f7bc5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.059147] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.060351] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1191.060351] env[68233]: value = "task-2783258" [ 1191.060351] env[68233]: _type = "Task" [ 1191.060351] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.068096] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783258, 'name': Rename_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.159968] env[68233]: DEBUG oslo_vmware.api [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783255, 'name': ReconfigVM_Task, 'duration_secs': 1.009878} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.160291] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559525', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'name': 'volume-ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c2d04b37-3eae-46cb-a227-b62d36c62a6a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3', 'serial': 'ccfad3f4-1e81-405b-8396-09efc21a5cb3'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1191.249979] env[68233]: DEBUG nova.objects.instance [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'migration_context' on Instance uuid 9f862347-508b-4c8a-a338-97972b0c0b0b {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.320086] env[68233]: INFO nova.compute.manager [-] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Took 1.25 seconds to deallocate network for instance. [ 1191.558018] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.568866] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783258, 'name': Rename_Task, 'duration_secs': 0.149191} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.569137] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1191.569372] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-38b161c5-c687-4e25-b2cf-53395409c970 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.576988] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1191.576988] env[68233]: value = "task-2783259" [ 1191.576988] env[68233]: _type = "Task" [ 1191.576988] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.584044] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783259, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.601293] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Updated VIF entry in instance network info cache for port 7975654b-2d0d-4348-8291-fd80199f2558. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1191.601634] env[68233]: DEBUG nova.network.neutron [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Updating instance_info_cache with network_info: [{"id": "7975654b-2d0d-4348-8291-fd80199f2558", "address": "fa:16:3e:cd:68:07", "network": {"id": "ac81fe8a-2ed1-4bd8-b7ad-e9a05b2fa88e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-534720852-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74a353ea173c4b8bb74b84032d4e12b0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7975654b-2d", "ovs_interfaceid": "7975654b-2d0d-4348-8291-fd80199f2558", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.702779] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.702904] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.703026] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.703106] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.703255] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.703403] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.703537] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1191.707280] env[68233]: DEBUG nova.objects.instance [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1191.718245] env[68233]: DEBUG nova.compute.manager [req-7defd06b-b7d0-467b-8157-83909ba333b4 req-e30be35f-cdd5-4296-8936-46ccc38f3477 service nova] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Received event network-vif-deleted-3f0ccb34-9d4b-457b-8eb8-4110c1b41180 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1191.828840] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1191.890625] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8addfe97-49f5-4086-ac78-040b8fe2dc23 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.898491] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b9091b-2c24-4821-b293-ddc68e96dbfd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.928063] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a67181-81a1-43fe-8c35-7014fcfc81a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.935563] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597597c1-321d-4d6b-baac-3b53b51b08c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.950117] env[68233]: DEBUG nova.compute.provider_tree [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} 
[ 1192.059122] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.085766] env[68233]: DEBUG oslo_vmware.api [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783259, 'name': PowerOnVM_Task, 'duration_secs': 0.456305} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.085987] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1192.086202] env[68233]: INFO nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Took 6.91 seconds to spawn the instance on the hypervisor. [ 1192.086379] env[68233]: DEBUG nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1192.087139] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b321d8c4-d304-48f9-8e7f-b7c82ea9fb17 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.104659] env[68233]: DEBUG oslo_concurrency.lockutils [req-26913354-feb6-4de0-ac78-1389629321d9 req-91b01e13-490f-482b-a014-b329240f07c0 service nova] Releasing lock "refresh_cache-ffc57efd-d031-4b09-8255-2498f01e8c78" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.210638] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Getting list of instances from cluster (obj){ [ 1192.210638] env[68233]: value = "domain-c8" [ 1192.210638] env[68233]: _type = "ClusterComputeResource" [ 1192.210638] env[68233]: } {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1192.213402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71472362-7d4f-4b41-9d59-faf2ffe0d8b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.233746] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Got total of 9 instances {{(pid=68233) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1192.233902] env[68233]: WARNING nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] While synchronizing instance power states, found 10 instances in the database and 9 instances on the hypervisor. 
[ 1192.233962] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid dd59cab5-3f9a-42cc-93f1-75cea940acdd {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.234179] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.234353] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.234561] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 9f862347-508b-4c8a-a338-97972b0c0b0b {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.234736] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 171da032-9aeb-4972-8ec7-4181e2667ac0 {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.234962] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 151b16bc-6b78-4527-8571-b07b5ad7db7b {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.235105] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.235235] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid aadc7dbe-456c-4bf3-b26d-bac672459fb9 {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.235396] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid 81e0800d-7731-433c-9238-b4aa07a4ddda {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.235574] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Triggering sync for uuid ffc57efd-d031-4b09-8255-2498f01e8c78 {{(pid=68233) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10863}} [ 1192.236650] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.236903] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.237239] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.237482] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.237689] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.237949] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.238164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.238350] env[68233]: INFO nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] During sync_power_state the instance has a pending task (resize_reverting). Skip. 
[ 1192.238530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.238724] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.238950] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.239320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.239390] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.239637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.239823] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.240096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "81e0800d-7731-433c-9238-b4aa07a4ddda" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.240299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.240536] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "ffc57efd-d031-4b09-8255-2498f01e8c78" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.240741] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.240897] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1192.241719] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8f65b4-3d90-435b-98d6-df79056fd79b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.245620] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933f43a9-629d-4fa7-aa19-479d0ac6cb7c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.248916] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884ac5e9-0f2b-4b54-b08d-945525374a1b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.251975] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c1138a-3983-422d-b521-b0bdcd4e97af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.255100] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bdae4f-799d-4e2a-96f4-f574930d4f0e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.257597] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1192.453032] env[68233]: DEBUG nova.scheduler.client.report [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1192.560223] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.605629] env[68233]: INFO nova.compute.manager [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Took 11.69 seconds to build instance. [ 1192.690338] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.719587] env[68233]: DEBUG oslo_concurrency.lockutils [None req-b2f7a742-e20d-45b1-b877-301c3742330c tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.241s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.720733] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.483s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.722230] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3ba88c-9cd7-4b43-95f5-330880788aae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.760656] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.775578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.777153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.540s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.777490] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.538s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.781977] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.782296] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.061111] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.107333] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6ec91876-46f9-427b-82ca-1b19f2c59430 tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.200s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.107651] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.867s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.107851] env[68233]: INFO nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] During sync_power_state the instance has a pending task (spawning). Skip. [ 1193.108126] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.231543] env[68233]: INFO nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] During sync_power_state the instance has a pending task (powering-off). Skip. 
[ 1193.231847] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.511s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.232482] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.542s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.232482] env[68233]: DEBUG nova.compute.manager [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1193.233374] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4024915e-80c3-4a8d-a58f-2ad971efb983 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.240824] env[68233]: DEBUG nova.compute.manager [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1193.241436] env[68233]: DEBUG nova.objects.instance [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1193.465318] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.718s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.471025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.642s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.471242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1193.473208] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.713s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.473436] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.473553] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1193.479017] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c458fda9-301b-44ae-bebe-6cb31eba0dc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.490532] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ba6254-8b9e-43fe-8f00-7712ac7262fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.495759] env[68233]: INFO nova.scheduler.client.report [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Deleted allocations for instance 151b16bc-6b78-4527-8571-b07b5ad7db7b [ 1193.511396] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5891af8-0d85-4d21-af58-5782635535c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.519907] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05981e3-c6a7-40b5-a0c1-6c7cdee22a09 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.552764] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180162MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1193.552959] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.553248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.563593] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b 
tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.821061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.821243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.014192] env[68233]: DEBUG oslo_concurrency.lockutils [None req-91bcdfce-4ada-473b-877f-823360eae531 tempest-DeleteServersTestJSON-938810114 tempest-DeleteServersTestJSON-938810114-project-member] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.182s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.015139] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.776s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.015545] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c16893c7-b5db-4c0c-85a8-771700929438 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.025098] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bae2db3-bc79-4cb9-b5fc-391cee2a1a0b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.073472] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.248583] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.248927] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49a20d2d-6cea-4b4b-9e27-191e31ca1ef3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.256444] env[68233]: DEBUG oslo_vmware.api [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1194.256444] env[68233]: value = "task-2783260" [ 1194.256444] env[68233]: _type = "Task" [ 1194.256444] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.264708] env[68233]: DEBUG oslo_vmware.api [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783260, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.323689] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1194.364078] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "ffc57efd-d031-4b09-8255-2498f01e8c78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.364454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.364731] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.364896] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.365119] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.367640] env[68233]: INFO nova.compute.manager [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Terminating instance [ 1194.561565] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "151b16bc-6b78-4527-8571-b07b5ad7db7b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.567665] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.590545] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.590779] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance c2d04b37-3eae-46cb-a227-b62d36c62a6a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.590943] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.591132] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 171da032-9aeb-4972-8ec7-4181e2667ac0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.591298] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 863e15c6-caa4-47aa-902a-7be2c9538687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.591464] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance aadc7dbe-456c-4bf3-b26d-bac672459fb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.591635] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 81e0800d-7731-433c-9238-b4aa07a4ddda actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1194.767617] env[68233]: DEBUG oslo_vmware.api [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783260, 'name': PowerOffVM_Task, 'duration_secs': 0.191163} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.767823] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.768036] env[68233]: DEBUG nova.compute.manager [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1194.768971] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338dc832-0164-4b8c-bb33-80bf6bb4a8ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.841889] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.872540] env[68233]: DEBUG nova.compute.manager [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1194.872723] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.873756] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad274df-23f5-4a91-8b83-cbd712f8891c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.881537] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.881794] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-814fd99a-f4a0-4b80-b148-ea9316dd950e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.887742] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1194.887742] env[68233]: value = "task-2783262" [ 1194.887742] env[68233]: _type = "Task" [ 1194.887742] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.895506] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.009459] env[68233]: INFO nova.compute.manager [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Swapping old allocation on dict_keys(['51aa13e7-0977-4031-b209-4ae90c83752c']) held by migration 2700e9c7-07ee-4466-a2cd-1549a06a554f for instance [ 1195.034716] env[68233]: DEBUG nova.scheduler.client.report [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Overwriting current allocation {'allocations': {'51aa13e7-0977-4031-b209-4ae90c83752c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 166}}, 'project_id': 'dbc7604c87d6485097fe5658d68217b9', 'user_id': '95122ece8b8b445aa04349a675f262b8', 'consumer_generation': 1} on consumer 9f862347-508b-4c8a-a338-97972b0c0b0b {{(pid=68233) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1195.068482] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.094774] env[68233]: INFO nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 2700e9c7-07ee-4466-a2cd-1549a06a554f has allocations against this compute host but is not found in the database. [ 1195.095011] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 9f862347-508b-4c8a-a338-97972b0c0b0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.095199] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance ffc57efd-d031-4b09-8255-2498f01e8c78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1195.129238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.129439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.129636] env[68233]: DEBUG nova.network.neutron [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.280299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f6535b6d-149d-4033-9636-ff3a476a516f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.399106] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783262, 'name': PowerOffVM_Task, 'duration_secs': 0.206236} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.399427] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1195.399598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.399848] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9073406-c99a-4a52-bc8f-00968ba40522 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.462957] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.463196] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.463387] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleting the datastore file [datastore2] ffc57efd-d031-4b09-8255-2498f01e8c78 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.463653] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-21a57262-94d2-44a4-87b6-1d3e361a6bf8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.469200] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for the task: (returnval){ [ 1195.469200] env[68233]: value = "task-2783264" [ 1195.469200] env[68233]: _type = "Task" [ 1195.469200] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.476317] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783264, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.568390] env[68233]: DEBUG oslo_vmware.api [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783251, 'name': ReconfigVM_Task, 'duration_secs': 5.754973} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.568706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.568838] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Reconfigured VM to detach interface {{(pid=68233) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1195.571015] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.332s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.571801] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b36a557-6f53-43af-842d-dc28ce413ca7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.598135] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance af8d2b01-b0a5-408b-ace3-dd085097b393 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1195.598368] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1195.598511] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1195.751999] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb06336-6cf5-41c6-9013-a919fc8d34ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.760312] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d9b75b-44c2-4f1f-8297-a73a91d1f87e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.798763] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677583c5-1f3f-410d-82a7-b772743d8bdd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.806263] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b73eafc-2e3b-47ae-b2fe-bb649db3dce2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.820339] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.889794] env[68233]: DEBUG nova.network.neutron [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [{"id": "4ae388e9-417d-4206-9e31-b91986ba0652", "address": "fa:16:3e:88:ff:16", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ae388e9-41", "ovs_interfaceid": "4ae388e9-417d-4206-9e31-b91986ba0652", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.978970] env[68233]: DEBUG oslo_vmware.api [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Task: {'id': task-2783264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.454472} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.979259] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.979445] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.979636] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.979818] env[68233]: INFO nova.compute.manager [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1195.980064] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.980256] env[68233]: DEBUG nova.compute.manager [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1195.980347] env[68233]: DEBUG nova.network.neutron [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.079967] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.509s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.243906] env[68233]: DEBUG nova.objects.instance [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.266136] env[68233]: DEBUG nova.compute.manager [req-2099b132-a172-4166-8ce3-055ae779dae1 req-74776543-4a74-4c97-88d7-6c7b11bebfbb service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Received event network-vif-deleted-7975654b-2d0d-4348-8291-fd80199f2558 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1196.266279] env[68233]: INFO nova.compute.manager [req-2099b132-a172-4166-8ce3-055ae779dae1 req-74776543-4a74-4c97-88d7-6c7b11bebfbb service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Neutron deleted interface 7975654b-2d0d-4348-8291-fd80199f2558; detaching it from the instance and deleting it from the info cache [ 1196.266463] env[68233]: DEBUG nova.network.neutron [req-2099b132-a172-4166-8ce3-055ae779dae1 req-74776543-4a74-4c97-88d7-6c7b11bebfbb service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.324284] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.392594] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-9f862347-508b-4c8a-a338-97972b0c0b0b" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.393110] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 
tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.393422] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-51d84bfd-cbf3-43b8-9d4f-09c65ab6f47a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.400890] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1196.400890] env[68233]: value = "task-2783265" [ 1196.400890] env[68233]: _type = "Task" [ 1196.400890] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.409437] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.711053] env[68233]: DEBUG nova.network.neutron [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.748228] env[68233]: DEBUG oslo_concurrency.lockutils [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.748408] env[68233]: DEBUG oslo_concurrency.lockutils [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.748581] env[68233]: DEBUG nova.network.neutron [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1196.748758] env[68233]: DEBUG nova.objects.instance [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'info_cache' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.769115] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57013baa-0165-47b9-88ee-4165968350d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.780152] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22860dcd-ed97-4ea8-aa87-f9da1033ca86 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.809335] env[68233]: DEBUG nova.compute.manager [req-2099b132-a172-4166-8ce3-055ae779dae1 req-74776543-4a74-4c97-88d7-6c7b11bebfbb service nova] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Detach interface failed, port_id=7975654b-2d0d-4348-8291-fd80199f2558, reason: Instance ffc57efd-d031-4b09-8255-2498f01e8c78 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1196.828753] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1196.828917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.276s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.829189] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.987s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1196.830713] env[68233]: INFO nova.compute.claims [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1196.910312] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783265, 'name': PowerOffVM_Task, 'duration_secs': 0.210559} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.910312] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.911069] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1196.911257] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1196.911397] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1196.911577] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1196.911726] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1196.911873] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1196.912083] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1196.912247] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 
tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1196.912413] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1196.912582] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1196.912752] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1196.917646] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67c49f3a-51f2-46f1-a32f-0dcf150dd470 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.932542] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1196.932542] env[68233]: value = "task-2783266" [ 1196.932542] env[68233]: _type = "Task" [ 1196.932542] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.941592] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783266, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.041622] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1197.041818] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquired lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1197.042012] env[68233]: DEBUG nova.network.neutron [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1197.127128] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.127397] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.127619] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.128248] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.128478] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.130515] env[68233]: INFO nova.compute.manager [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Terminating instance [ 1197.213146] env[68233]: INFO nova.compute.manager [-] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Took 1.23 seconds to deallocate network for instance. [ 1197.251698] env[68233]: DEBUG nova.objects.base [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1197.443296] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783266, 'name': ReconfigVM_Task, 'duration_secs': 0.13387} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.444754] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d172799-1773-437c-b9f1-5a3019cdfb93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.466360] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1197.466606] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1197.466767] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1197.466956] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1197.467114] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 
{{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1197.467267] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1197.467469] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1197.467632] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1197.467798] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1197.467960] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1197.468151] env[68233]: DEBUG nova.virt.hardware [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1197.468910] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4048a6a1-ed99-424c-b66a-05223ac9ec2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.474136] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1197.474136] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526b895b-8693-d6a8-13a9-4ff1e159aec3" [ 1197.474136] env[68233]: _type = "Task" [ 1197.474136] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.482652] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526b895b-8693-d6a8-13a9-4ff1e159aec3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.634041] env[68233]: DEBUG nova.compute.manager [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1197.634181] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1197.635081] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a74e805-419f-477b-9b65-e1365a65cfa0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.643199] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1197.643199] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb832718-14ab-4439-a1d5-cd09c09f50e2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.649377] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1197.649377] env[68233]: value = "task-2783267" [ 1197.649377] env[68233]: _type = "Task" [ 1197.649377] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.656610] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.719307] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.764995] env[68233]: INFO nova.network.neutron [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Port 9b3396e5-37ec-49f5-9da5-1c9cc423a97e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
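The records above cycle through two mechanisms: named-lock serialization via oslo.concurrency (the 'Acquiring lock ... by ...', 'acquired ... :: waited' and '"released" ... :: held' lines emitted by lockutils), and vCenter task handling via oslo.vmware, where a *_Task method (PowerOffVM_Task, ReconfigVM_Task, DeleteDatastoreFile_Task) is invoked and then polled until it reports "completed successfully". What follows is a minimal sketch of those two patterns using the public oslo.concurrency and oslo.vmware APIs, not Nova's actual driver code: the vCenter host, credentials and vm_ref are placeholders, and update_available_resource is only a hypothetical stand-in for the resource tracker's audit body.

    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api

    # Placeholder connection details; real values come from nova.conf [vmware].
    # The opID=oslo.vmware-<uuid> tags on each request in the log are generated
    # by this library's request handling.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test',           # host -- placeholder
        'administrator@vsphere.local',    # server_username -- placeholder
        'secret',                         # server_password -- placeholder
        10,                               # api_retry_count
        0.5)                              # task_poll_interval: seconds between
                                          # the "progress is N%" poll lines

    # Named-lock serialization: lockutils.synchronized produces the
    # 'Acquiring lock "compute_resources" by ...' DEBUG lines (inner at
    # lockutils.py:405/410/424), while the lockutils.lock() context manager
    # produces the 'Acquiring lock "refresh_cache-<uuid>"' ones
    # (lock at lockutils.py:313/316/334).
    @lockutils.synchronized('compute_resources')
    def update_available_resource():
        # Hypothetical stand-in for the resource tracker's audit work.
        pass

    # vCenter task submit-and-poll: invoke_api() issues the SOAP call logged as
    # 'Invoking VirtualMachine.PowerOffVM_Task with opID=...', and
    # wait_for_task() blocks while logging 'Task: {... PowerOffVM_Task}
    # progress is N%' until the task completes successfully.
    def power_off(vm_ref):
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

The same submit-and-poll shape underlies the ReconfigVM_Task and DeleteDatastoreFile_Task entries in this trace; only the invoked method name and its arguments differ.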
[ 1197.765402] env[68233]: DEBUG nova.network.neutron [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [{"id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "address": "fa:16:3e:39:fa:b7", "network": {"id": "5065c922-1b9f-4d7f-8615-b5619dd4fc68", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1292614300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "74638e02258142a1a5170178faabb0ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "130387c4-e4ec-4d95-8e9d-bb079baabad8", "external-id": "nsx-vlan-transportzone-105", "segmentation_id": 105, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3ccdb5a-c4", "ovs_interfaceid": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.986039] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526b895b-8693-d6a8-13a9-4ff1e159aec3, 'name': SearchDatastore_Task, 'duration_secs': 0.006486} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.991791] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1197.992122] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d056aa3-c504-40e4-ae0c-7c5f4d547898 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.006833] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5746c322-12b3-4c3e-89c4-7e495ba110dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.015699] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd911bbb-7c4e-44c4-8ede-68e5860b0913 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.018803] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1198.018803] env[68233]: value = "task-2783268" [ 1198.018803] env[68233]: _type = "Task" [ 1198.018803] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.049908] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92a4c63-afe5-475a-b708-bdd397c3f24e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.056888] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783268, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.062097] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc31c52-cdf6-4a5b-bf59-3a6ccc007bb4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.076376] env[68233]: DEBUG nova.compute.provider_tree [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1198.160915] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783267, 'name': PowerOffVM_Task, 'duration_secs': 0.215093} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.161125] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.161307] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.161582] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6cc36ffa-c2e4-4ecb-97f3-c9c28926ccdd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.231682] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1198.231894] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1198.232102] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleting the datastore file [datastore2] 171da032-9aeb-4972-8ec7-4181e2667ac0 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.232369] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67423d5c-10d1-4d98-aabe-369f2958f656 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.239462] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1198.239462] env[68233]: value = "task-2783270" [ 1198.239462] env[68233]: _type = "Task" [ 1198.239462] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.249677] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783270, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.268374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Releasing lock "refresh_cache-171da032-9aeb-4972-8ec7-4181e2667ac0" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1198.481840] env[68233]: DEBUG nova.network.neutron [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [{"id": "16ec4545-d69d-43bf-a956-54414f895c1e", "address": "fa:16:3e:82:05:b4", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16ec4545-d6", "ovs_interfaceid": "16ec4545-d69d-43bf-a956-54414f895c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.528228] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783268, 'name': ReconfigVM_Task, 'duration_secs': 0.188932} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.528512] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1198.529323] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54a7c4d-6bf4-4ff5-ab18-e692bdd73694 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.550658] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1198.550803] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2309776d-81c5-4f34-a65f-8f8a0e9fdd06 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.568280] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1198.568280] env[68233]: value = "task-2783271" [ 1198.568280] env[68233]: _type = "Task" [ 1198.568280] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.575851] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783271, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.579759] env[68233]: DEBUG nova.scheduler.client.report [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1198.749659] env[68233]: DEBUG oslo_vmware.api [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148925} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.749940] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.750204] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1198.750403] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1198.750578] env[68233]: INFO nova.compute.manager [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1198.750821] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1198.751013] env[68233]: DEBUG nova.compute.manager [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1198.751116] env[68233]: DEBUG nova.network.neutron [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1198.771853] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a635197f-ea88-4df0-a5ae-c6d1452aa02b tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "interface-171da032-9aeb-4972-8ec7-4181e2667ac0-9b3396e5-37ec-49f5-9da5-1c9cc423a97e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.067s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.985585] env[68233]: DEBUG oslo_concurrency.lockutils [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "refresh_cache-c2d04b37-3eae-46cb-a227-b62d36c62a6a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.079475] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783271, 'name': ReconfigVM_Task, 'duration_secs': 0.255716} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.079760] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b/9f862347-508b-4c8a-a338-97972b0c0b0b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1199.081608] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9457a1a8-1c62-41b9-a86b-251de7902982 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.083992] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.255s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.084358] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1199.087053] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.368s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.087281] env[68233]: DEBUG nova.objects.instance [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lazy-loading 'resources' on Instance uuid ffc57efd-d031-4b09-8255-2498f01e8c78 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.107328] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4baca63f-f0fc-417d-a06e-ae40aeb25a70 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.128055] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847ff60e-34c4-4ddb-8ee9-3dae80a37b94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.148506] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf83d96-0b0c-4994-aa55-d2f0bac23c3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.157433] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.157433] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d1ae06c-80b3-4611-bbc9-01e8485ff9a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.162563] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1199.162563] env[68233]: value = "task-2783272" [ 1199.162563] env[68233]: _type = "Task" [ 1199.162563] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.170379] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783272, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.594011] env[68233]: DEBUG nova.compute.utils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1199.598554] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1199.598607] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1199.669986] env[68233]: DEBUG nova.policy [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da4cb00bd4c3405c88d8616b66b71e00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '14d2a0ead80a4efba8420023c31f8f11', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1199.675229] env[68233]: DEBUG oslo_vmware.api [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783272, 'name': PowerOnVM_Task, 'duration_secs': 0.379512} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.679100] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1199.789112] env[68233]: DEBUG nova.compute.manager [req-ec93f2e9-6eb0-4221-81aa-ba9728a6da57 req-d6a084e9-cf98-4612-8b49-df8068f0e0f7 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Received event network-vif-deleted-d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1199.789403] env[68233]: INFO nova.compute.manager [req-ec93f2e9-6eb0-4221-81aa-ba9728a6da57 req-d6a084e9-cf98-4612-8b49-df8068f0e0f7 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Neutron deleted interface d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c; detaching it from the instance and deleting it from the info cache [ 1199.790036] env[68233]: DEBUG nova.network.neutron [req-ec93f2e9-6eb0-4221-81aa-ba9728a6da57 req-d6a084e9-cf98-4612-8b49-df8068f0e0f7 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.801364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83d4db4-0bee-4c1c-a9a2-85cb0b46f213 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.811530] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3470696c-4a72-4c6b-96a3-4bfe5e2372d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.847118] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f5efec-5a39-4a56-b078-f55102e020af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.854888] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cdc2533-49da-4bb3-9e97-067c143645d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.868781] env[68233]: DEBUG nova.compute.provider_tree [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.991822] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.992253] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-894dd394-d95e-44ad-94ce-680d2a646129 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.001740] env[68233]: DEBUG oslo_vmware.api [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1200.001740] env[68233]: value = "task-2783273" [ 1200.001740] env[68233]: _type = "Task" [ 1200.001740] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.008640] env[68233]: DEBUG oslo_vmware.api [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783273, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.086046] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Successfully created port: 343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1200.098750] env[68233]: DEBUG nova.network.neutron [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.100533] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1200.296149] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a0bf4bb-e485-4c52-b9d4-21801ff67f28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.307529] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c103bba-e0ce-4fa3-82df-ca5450cefc71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.341435] env[68233]: DEBUG nova.compute.manager [req-ec93f2e9-6eb0-4221-81aa-ba9728a6da57 req-d6a084e9-cf98-4612-8b49-df8068f0e0f7 service nova] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Detach interface failed, port_id=d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c, reason: Instance 171da032-9aeb-4972-8ec7-4181e2667ac0 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1200.372349] env[68233]: DEBUG nova.scheduler.client.report [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.385594] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1200.385863] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1200.386157] env[68233]: INFO nova.compute.manager [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Shelving [ 1200.509777] env[68233]: DEBUG oslo_vmware.api [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783273, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.604981] env[68233]: INFO nova.compute.manager [-] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Took 1.85 seconds to deallocate network for instance. 
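The instance_info_cache dumps earlier in this trace (for example the one logged for port d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c before the network above was deallocated) show the per-VIF network_info structure Nova caches: network, subnets, fixed IPs, and any floating IPs. A minimal sketch of walking one such entry; the dict is abbreviated to the keys used here, with the addresses copied from that dump:

# One VIF entry, reduced to the keys this sketch reads.
vif = {
    "id": "d3ccdb5a-c47a-4c1a-bbab-97a2dab7185c",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.9",
                "floating_ips": [{"address": "10.180.180.141"}],
            }],
        }],
    },
}

# List each fixed IP and the floating IPs attached to it.
for subnet in vif["network"]["subnets"]:
    for ip in subnet["ips"]:
        floats = [f["address"] for f in ip.get("floating_ips", [])]
        print(ip["address"], "->", ", ".join(floats) or "no floating IP")
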
[ 1200.606667] env[68233]: INFO nova.virt.block_device [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Booting with volume 078fe37a-d525-4476-a117-3378bb38267b at /dev/sda [ 1200.649081] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0f999d9-c7e8-4714-a93a-b4eaafb6ab51 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.658232] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a753fb80-469f-4e34-b2ce-654f14ceff1a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.685534] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b00ddcd8-58ec-4d80-80d3-8e11b662e078 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.691133] env[68233]: INFO nova.compute.manager [None req-8ac3e5e7-2ff3-4317-9da5-3e5a6927d7f0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance to original state: 'active' [ 1200.696725] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ea55c2-91ef-43a2-a0fe-478701713bbd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.726032] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7022e46e-04fb-4ce3-acaf-8bc628cb23c9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.733271] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aae88ce-a598-421b-a683-c2f2b35dbadc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.747421] env[68233]: DEBUG nova.virt.block_device [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating existing volume attachment record: 556f16f1-8bc8-495b-826a-da158c3c7f7b {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1200.877771] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.899937] env[68233]: INFO nova.scheduler.client.report [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Deleted allocations for instance ffc57efd-d031-4b09-8255-2498f01e8c78 [ 1201.010189] env[68233]: DEBUG oslo_vmware.api [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: 
{'id': task-2783273, 'name': PowerOnVM_Task, 'duration_secs': 0.62709} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.010505] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1201.010744] env[68233]: DEBUG nova.compute.manager [None req-77aabc45-0561-461f-808a-5a95439c3e91 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1201.011528] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aba2abb-9172-4ce1-89ef-d49c9650c37b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.119705] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.120081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.120281] env[68233]: DEBUG nova.objects.instance [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'resources' on Instance uuid 171da032-9aeb-4972-8ec7-4181e2667ac0 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.398951] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1201.399290] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb6a2e19-6e7b-43d0-aa63-3be96ec56874 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.406826] env[68233]: DEBUG oslo_concurrency.lockutils [None req-525987a5-9e00-4cf5-a4aa-83553d097dcb tempest-ServerDiskConfigTestJSON-1552432801 tempest-ServerDiskConfigTestJSON-1552432801-project-member] Lock "ffc57efd-d031-4b09-8255-2498f01e8c78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.042s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.408505] env[68233]: DEBUG oslo_vmware.api [None 
req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1201.408505] env[68233]: value = "task-2783274" [ 1201.408505] env[68233]: _type = "Task" [ 1201.408505] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.416944] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783274, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.608939] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Successfully updated port: 343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.780325] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c6f86-01ec-4352-9c0d-1b6406155bab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.787714] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5eafc8-6352-402e-a4ed-7a92a945982e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.823754] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8ba5b7-816d-4182-9300-ac3b67f6bb56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.833695] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fe5cdc-a4e5-41a0-a57b-5e320a63a542 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.847726] env[68233]: DEBUG nova.compute.provider_tree [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.887999] env[68233]: DEBUG nova.compute.manager [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Received event network-vif-plugged-343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1201.888298] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.888444] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 
req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.888608] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.888771] env[68233]: DEBUG nova.compute.manager [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] No waiting events found dispatching network-vif-plugged-343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1201.888931] env[68233]: WARNING nova.compute.manager [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Received unexpected event network-vif-plugged-343ba5d0-d87f-4796-a86c-5a2922804c78 for instance with vm_state building and task_state block_device_mapping. [ 1201.889413] env[68233]: DEBUG nova.compute.manager [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Received event network-changed-343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1201.889543] env[68233]: DEBUG nova.compute.manager [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Refreshing instance network info cache due to event network-changed-343ba5d0-d87f-4796-a86c-5a2922804c78. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1201.889709] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.889843] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.889995] env[68233]: DEBUG nova.network.neutron [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Refreshing network info cache for port 343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1201.919422] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783274, 'name': PowerOffVM_Task, 'duration_secs': 0.330424} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.919422] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1201.920100] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0989434d-682f-49be-9a5a-246a1a7348c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.938786] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b488cc-aadd-4d32-9b0f-d380a8754abd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.112232] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.353026] env[68233]: DEBUG nova.scheduler.client.report [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.437574] env[68233]: DEBUG nova.network.neutron [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1202.449032] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1202.449179] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-eb44be77-928d-4618-ae87-0b37400b2f2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.457322] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1202.457322] env[68233]: value = "task-2783275" [ 1202.457322] env[68233]: _type = "Task" [ 1202.457322] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.468662] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783275, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.548916] env[68233]: DEBUG nova.network.neutron [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.576962] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.577349] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.577636] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.577916] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1202.578202] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.580928] env[68233]: INFO nova.compute.manager [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Terminating instance [ 1202.835394] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1202.835977] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1202.836212] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1202.836370] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1202.836556] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1202.836753] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1202.836998] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1202.837311] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1202.837548] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1202.837835] env[68233]: DEBUG nova.virt.hardware [None 
req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1202.838068] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1202.838330] env[68233]: DEBUG nova.virt.hardware [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1202.839481] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2933efd-1084-4de2-a68b-c6f59ed41d6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.849546] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbf705f-6bcd-4bf7-bbbb-6fd610213356 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.855813] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.898773] env[68233]: INFO nova.scheduler.client.report [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted allocations for instance 171da032-9aeb-4972-8ec7-4181e2667ac0 [ 1202.969654] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783275, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.052090] env[68233]: DEBUG oslo_concurrency.lockutils [req-7edccc40-c6a0-49cf-90b0-fc9018efd3e2 req-c83b9d7a-5507-4578-9c2e-4ed4bf00cdfb service nova] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.052505] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.052671] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1203.085108] env[68233]: DEBUG nova.compute.manager [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1203.085351] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1203.086293] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca477ec-439d-4c8d-8e57-bd4921a5abba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.094433] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1203.094697] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bb00887-5d13-420d-bdf3-3293549b82c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.100674] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1203.100674] env[68233]: value = "task-2783276" [ 1203.100674] env[68233]: _type = "Task" [ 1203.100674] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.108903] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783276, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.410927] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36759651-dc05-40d2-8384-788eea95141c tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "171da032-9aeb-4972-8ec7-4181e2667ac0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.283s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.469137] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783275, 'name': CreateSnapshot_Task, 'duration_secs': 0.815207} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.469368] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1203.470142] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94872f81-2807-42e2-87c7-c90ed10c3d97 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.582899] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1203.612120] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783276, 'name': PowerOffVM_Task, 'duration_secs': 0.214064} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.612417] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.612619] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.613113] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd1a3aa8-8db3-49c9-ac69-1806dde0f0f4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.716290] env[68233]: DEBUG nova.network.neutron [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.739831] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.740084] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.740272] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 
tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleting the datastore file [datastore2] 9f862347-508b-4c8a-a338-97972b0c0b0b {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.740532] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1933b69-5efb-45c6-aace-2ff15f72b6c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.747038] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1203.747038] env[68233]: value = "task-2783278" [ 1203.747038] env[68233]: _type = "Task" [ 1203.747038] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.754572] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783278, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.902797] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.903516] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.903516] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.903716] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.903945] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.906244] env[68233]: INFO nova.compute.manager [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Terminating instance [ 1203.987846] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1203.988441] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b7cda829-c2e5-486d-8885-b6f7bac11f6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.996419] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1203.996419] env[68233]: value = "task-2783279" [ 1203.996419] env[68233]: _type = "Task" [ 1203.996419] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.004733] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783279, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.219280] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.219572] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance network_info: |[{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1204.220038] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:56:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27abaf31-0f39-428c-a8d3-cd7548de6818', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '343ba5d0-d87f-4796-a86c-5a2922804c78', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1204.228555] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.228855] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1204.229158] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a5b0155-af02-462f-a8bb-e9f9b53c9d5b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.253250] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1204.253250] env[68233]: value = "task-2783280" [ 1204.253250] env[68233]: _type = "Task" [ 1204.253250] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.261922] env[68233]: DEBUG oslo_vmware.api [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783278, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432278} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.262782] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.263063] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.263371] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.263689] env[68233]: INFO nova.compute.manager [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1204.264041] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1204.268174] env[68233]: DEBUG nova.compute.manager [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1204.268349] env[68233]: DEBUG nova.network.neutron [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1204.270443] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783280, 'name': CreateVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.410225] env[68233]: DEBUG nova.compute.manager [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1204.410509] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1204.411495] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc68932b-0195-48d5-aff4-c5e50ba4d145 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.419473] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1204.419713] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3611e68c-db85-472c-81ab-2127ae77152f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.427357] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1204.427357] env[68233]: value = "task-2783281" [ 1204.427357] env[68233]: _type = "Task" [ 1204.427357] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.435880] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783281, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.511013] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783279, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.763690] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783280, 'name': CreateVM_Task, 'duration_secs': 0.348421} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.763831] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1204.764705] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'delete_on_termination': True, 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559534', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'name': 'volume-078fe37a-d525-4476-a117-3378bb38267b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af8d2b01-b0a5-408b-ace3-dd085097b393', 'attached_at': '', 'detached_at': '', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'serial': '078fe37a-d525-4476-a117-3378bb38267b'}, 'boot_index': 0, 'attachment_id': '556f16f1-8bc8-495b-826a-da158c3c7f7b', 'mount_device': '/dev/sda', 'disk_bus': None, 'guest_format': None, 'volume_type': None}], 'swap': None} {{(pid=68233) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1204.764931] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Root volume attach. 
Driver type: vmdk {{(pid=68233) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1204.765744] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cd5039-44e6-4310-8ebc-93dec20d9000 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.773322] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442abaa0-cd00-44aa-a863-04a396f6c8a1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.779251] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca052c0d-882e-4cd4-83bf-a0f2fe784ae5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.784910] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b268d7b6-8e74-4e81-a3bf-60da9f0fccde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.790902] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1204.790902] env[68233]: value = "task-2783282" [ 1204.790902] env[68233]: _type = "Task" [ 1204.790902] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.798242] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783282, 'name': RelocateVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.938374] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783281, 'name': PowerOffVM_Task, 'duration_secs': 0.30771} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.938644] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1204.938922] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1204.939099] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99afd708-834b-47ce-be92-08fcf0cb3732 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.010605] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783279, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.015892] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1205.016077] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1205.016269] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleting the datastore file [datastore2] 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1205.016531] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-94caafd7-c061-480e-a09c-f5aea92b67a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.022700] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for the task: (returnval){ [ 1205.022700] env[68233]: value = "task-2783284" [ 1205.022700] env[68233]: _type = "Task" [ 1205.022700] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.031598] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783284, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.064160] env[68233]: DEBUG nova.compute.manager [req-f7c5327f-b32f-484a-bf92-c382f9570c38 req-e855e890-9c0b-41a6-ae87-98c0604a8847 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Received event network-vif-deleted-4ae388e9-417d-4206-9e31-b91986ba0652 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1205.064422] env[68233]: INFO nova.compute.manager [req-f7c5327f-b32f-484a-bf92-c382f9570c38 req-e855e890-9c0b-41a6-ae87-98c0604a8847 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Neutron deleted interface 4ae388e9-417d-4206-9e31-b91986ba0652; detaching it from the instance and deleting it from the info cache [ 1205.064552] env[68233]: DEBUG nova.network.neutron [req-f7c5327f-b32f-484a-bf92-c382f9570c38 req-e855e890-9c0b-41a6-ae87-98c0604a8847 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.301787] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783282, 'name': RelocateVM_Task, 'duration_secs': 0.024434} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.302465] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1205.302465] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559534', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'name': 'volume-078fe37a-d525-4476-a117-3378bb38267b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af8d2b01-b0a5-408b-ace3-dd085097b393', 'attached_at': '', 'detached_at': '', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'serial': '078fe37a-d525-4476-a117-3378bb38267b'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1205.303187] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1798c29d-3be9-421f-ae69-7e2413d7698f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.319018] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7db8c5a-977a-4461-8e25-751f8b246e36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.341673] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] volume-078fe37a-d525-4476-a117-3378bb38267b/volume-078fe37a-d525-4476-a117-3378bb38267b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.342364] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84f1fbf3-27e3-4f5a-b729-a327baeaf912 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.361541] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1205.361541] env[68233]: value = "task-2783285" [ 1205.361541] env[68233]: _type = "Task" [ 1205.361541] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.369922] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783285, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.512906] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783279, 'name': CloneVM_Task, 'duration_secs': 1.205825} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.513222] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Created linked-clone VM from snapshot [ 1205.513974] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5755dd-ebeb-42a4-a2e2-267864debb01 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.521392] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Uploading image 267e83e8-84b0-4301-9a90-8a1f48a5e360 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1205.531317] env[68233]: DEBUG oslo_vmware.api [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Task: {'id': task-2783284, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.411735} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.531550] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1205.531745] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1205.531918] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1205.532113] env[68233]: INFO nova.compute.manager [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1205.532344] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1205.532587] env[68233]: DEBUG nova.compute.manager [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1205.532645] env[68233]: DEBUG nova.network.neutron [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1205.545944] env[68233]: DEBUG nova.network.neutron [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1205.549661] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1205.549661] env[68233]: value = "vm-559537" [ 1205.549661] env[68233]: _type = "VirtualMachine" [ 1205.549661] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1205.549903] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e4bb2699-2b0b-4614-9762-271825067594 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.559800] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease: (returnval){ [ 1205.559800] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a36a29-8063-91e9-29d5-15eb9941adcb" [ 1205.559800] env[68233]: _type = "HttpNfcLease" [ 1205.559800] env[68233]: } obtained for exporting VM: (result){ [ 1205.559800] env[68233]: value = "vm-559537" [ 1205.559800] env[68233]: _type = "VirtualMachine" [ 1205.559800] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1205.560179] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the lease: (returnval){ [ 1205.560179] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a36a29-8063-91e9-29d5-15eb9941adcb" [ 1205.560179] env[68233]: _type = "HttpNfcLease" [ 1205.560179] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1205.568283] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1205.568283] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a36a29-8063-91e9-29d5-15eb9941adcb" [ 1205.568283] env[68233]: _type = "HttpNfcLease" [ 1205.568283] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1205.568725] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2e9976ed-7ab2-4bb1-a0f4-b96883a30c05 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.577753] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e982f897-0a99-4bf4-afbc-c37432d7878f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.611195] env[68233]: DEBUG nova.compute.manager [req-f7c5327f-b32f-484a-bf92-c382f9570c38 req-e855e890-9c0b-41a6-ae87-98c0604a8847 service nova] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Detach interface failed, port_id=4ae388e9-417d-4206-9e31-b91986ba0652, reason: Instance 9f862347-508b-4c8a-a338-97972b0c0b0b could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1205.871527] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783285, 'name': ReconfigVM_Task, 'duration_secs': 0.507294} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.871527] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfigured VM instance instance-00000078 to attach disk [datastore2] volume-078fe37a-d525-4476-a117-3378bb38267b/volume-078fe37a-d525-4476-a117-3378bb38267b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1205.876382] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84562034-cfbd-45a5-9890-0c638ab77a8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.894020] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1205.894020] env[68233]: value = "task-2783287" [ 1205.894020] env[68233]: _type = "Task" [ 1205.894020] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.901155] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783287, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.048264] env[68233]: INFO nova.compute.manager [-] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Took 1.78 seconds to deallocate network for instance. 
[ 1206.068046] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1206.068046] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a36a29-8063-91e9-29d5-15eb9941adcb" [ 1206.068046] env[68233]: _type = "HttpNfcLease" [ 1206.068046] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1206.068423] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1206.068423] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52a36a29-8063-91e9-29d5-15eb9941adcb" [ 1206.068423] env[68233]: _type = "HttpNfcLease" [ 1206.068423] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1206.069197] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c164d4db-e2b7-458c-b260-d5a8f422b380 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.077155] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1206.077336] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1206.248627] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ca28de49-1b87-47b8-aa7e-c87db5e1eefa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.336573] env[68233]: DEBUG nova.network.neutron [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1206.401746] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783287, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.555915] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1206.556193] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1206.556426] env[68233]: DEBUG nova.objects.instance [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'resources' on Instance uuid 9f862347-508b-4c8a-a338-97972b0c0b0b {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1206.839851] env[68233]: INFO nova.compute.manager [-] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Took 1.31 seconds to deallocate network for instance. [ 1206.903416] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783287, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.071162] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.071822] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.091546] env[68233]: DEBUG nova.compute.manager [req-41f84069-3b61-48b8-83ab-d7ba019549df req-80f7e402-2c58-4f8f-adc4-d8b5a91ff845 service nova] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Received event network-vif-deleted-1d10db7a-f783-4b60-b20a-834d68367b3c {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1207.183593] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef598657-6ab6-45d5-8080-36f36ce8bfab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.192406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4751b152-7550-4170-b8b8-ba64d963266b {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.223840] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb9da3d-eb36-461d-8094-ee8192d49438 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.232034] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eccc951-cc96-4439-9c9f-d5dd974b3758 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.247454] env[68233]: DEBUG nova.compute.provider_tree [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1207.348669] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.403678] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783287, 'name': ReconfigVM_Task, 'duration_secs': 1.13695} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.404321] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559534', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'name': 'volume-078fe37a-d525-4476-a117-3378bb38267b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af8d2b01-b0a5-408b-ace3-dd085097b393', 'attached_at': '', 'detached_at': '', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'serial': '078fe37a-d525-4476-a117-3378bb38267b'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1207.405839] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-182f3388-a65c-4541-841b-e6e3831f0b12 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.411677] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1207.411677] env[68233]: value = "task-2783288" [ 1207.411677] env[68233]: _type = "Task" [ 1207.411677] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.419679] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783288, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.574593] env[68233]: DEBUG nova.compute.utils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1207.751357] env[68233]: DEBUG nova.scheduler.client.report [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.923407] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783288, 'name': Rename_Task, 'duration_secs': 0.133591} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.923779] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1207.924152] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4199144d-3f68-43e6-8810-b9951a1f3a9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.930678] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1207.930678] env[68233]: value = "task-2783289" [ 1207.930678] env[68233]: _type = "Task" [ 1207.930678] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.939176] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783289, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.077775] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.257502] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.701s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.259997] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.912s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.260271] env[68233]: DEBUG nova.objects.instance [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lazy-loading 'resources' on Instance uuid 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1208.276279] env[68233]: INFO nova.scheduler.client.report [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted allocations for instance 9f862347-508b-4c8a-a338-97972b0c0b0b [ 1208.442009] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783289, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.784530] env[68233]: DEBUG oslo_concurrency.lockutils [None req-837a3363-e76e-45f8-9b5f-6598cf6178b0 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "9f862347-508b-4c8a-a338-97972b0c0b0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.207s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.862515] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89362237-01b7-480d-849b-ce5b617beff9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.871414] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3decb696-070d-4334-a443-8dd34805df26 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.901594] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cf9bed-d95c-4bea-8222-e3e05c1f9bcf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.909294] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce133ee-6371-455c-bdf1-001bdae9e033 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.925692] env[68233]: DEBUG nova.compute.provider_tree [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1208.940727] env[68233]: DEBUG oslo_vmware.api [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783289, 'name': PowerOnVM_Task, 'duration_secs': 0.553558} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.942122] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.942122] env[68233]: INFO nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Took 6.11 seconds to spawn the instance on the hypervisor. 
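The ReconfigVM_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: invoke the vSphere *_Task method, then poll the returned task, which is what produces the "Waiting for the task" / "progress is N%" / "completed successfully" lines. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession (the moref value and function name below are illustrative, not taken from the log):

# Sketch of the task-polling pattern behind the entries above.
# `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession.
from oslo_vmware import vim_util


def power_on(session, vm_moref_value):
    vm_ref = vim_util.get_moref(vm_moref_value, 'VirtualMachine')
    # invoke_api() issues the PowerOnVM_Task call and returns the task moref...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # ...and wait_for_task() polls it, emitting the "progress is N%" DEBUG
    # lines until the task completes, then returns the result or raises.
    return session.wait_for_task(task)

The same two calls sit behind every task seen in this stretch of the log; only the invoked method (ReconfigVM_Task, Rename_Task, Destroy_Task, ...) changes.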
[ 1208.942122] env[68233]: DEBUG nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.942268] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f1c44b-032f-4c78-8ee2-40bde4401603 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.145518] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.145875] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.146088] env[68233]: INFO nova.compute.manager [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Attaching volume ed873c31-cc71-4640-9c21-d65eb646b508 to /dev/sdb [ 1209.182910] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb53c761-c37e-46ba-b229-a50f328e2557 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.190953] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450275ff-3c71-4e2c-a018-0db76e9e0108 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.205470] env[68233]: DEBUG nova.virt.block_device [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating existing volume attachment record: e1f3426d-48a1-44cd-9ca6-95ea3c05f121 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1209.429360] env[68233]: DEBUG nova.scheduler.client.report [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 
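The "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" triplets that run through this whole stretch (the compute_resources lock, the per-instance UUID locks, the refresh_cache-<uuid> locks) come from oslo.concurrency. A minimal sketch of the two usual forms, with the lock names taken from the log and the function bodies purely illustrative:

import time

from oslo_concurrency import lockutils


# Decorator form: the wrapper logs "Acquiring lock", how long it waited to
# acquire, and how long it was held on release, as in the entries above.
@lockutils.synchronized('compute_resources')
def update_usage():
    time.sleep(0.1)  # stand-in for resource-tracker bookkeeping


# Context-manager form, as used for the shorter-lived per-instance locks.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance network info cache here

Both forms only serialize callers on the lock name; the "waited 0.912s" and "held 1.701s" figures above are simply contention between resource-tracker paths on the shared compute_resources name.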
1209.458946] env[68233]: INFO nova.compute.manager [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Took 14.63 seconds to build instance. [ 1209.935072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.675s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.961765] env[68233]: DEBUG oslo_concurrency.lockutils [None req-fc4b966f-f906-43c4-a89c-000d28e9dfbe tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.140s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.962278] env[68233]: INFO nova.scheduler.client.report [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Deleted allocations for instance 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9 [ 1209.988565] env[68233]: DEBUG nova.compute.manager [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Received event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1209.988775] env[68233]: DEBUG nova.compute.manager [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing instance network info cache due to event network-changed-f8c8623e-abba-4da9-8ab2-20413bb09889. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1209.989055] env[68233]: DEBUG oslo_concurrency.lockutils [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] Acquiring lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.989210] env[68233]: DEBUG oslo_concurrency.lockutils [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] Acquired lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.989373] env[68233]: DEBUG nova.network.neutron [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Refreshing network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1210.161327] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.161714] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.475192] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dea1ea63-f1a4-4bc9-988d-c9c027889688 tempest-AttachInterfacesTestJSON-1626168707 tempest-AttachInterfacesTestJSON-1626168707-project-member] Lock "03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.572s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.664594] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1210.864839] env[68233]: DEBUG nova.network.neutron [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updated VIF entry in instance network info cache for port f8c8623e-abba-4da9-8ab2-20413bb09889. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.865283] env[68233]: DEBUG nova.network.neutron [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [{"id": "f8c8623e-abba-4da9-8ab2-20413bb09889", "address": "fa:16:3e:36:c6:63", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf8c8623e-ab", "ovs_interfaceid": "f8c8623e-abba-4da9-8ab2-20413bb09889", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.191127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.191476] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.193310] env[68233]: INFO nova.compute.claims [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1211.368229] env[68233]: DEBUG oslo_concurrency.lockutils [req-ac0a8c76-9f36-4d57-9c1f-f1d9ca238254 req-210f5772-a201-45b8-bbc7-4894fda05899 service nova] Releasing lock "refresh_cache-dd59cab5-3f9a-42cc-93f1-75cea940acdd" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.417675] env[68233]: DEBUG nova.compute.manager [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1211.940014] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.019164] env[68233]: DEBUG nova.compute.manager [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Received event network-changed-343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1212.019451] env[68233]: DEBUG nova.compute.manager [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Refreshing instance network info cache due to event network-changed-343ba5d0-d87f-4796-a86c-5a2922804c78. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1212.019597] env[68233]: DEBUG oslo_concurrency.lockutils [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.020346] env[68233]: DEBUG oslo_concurrency.lockutils [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1212.020346] env[68233]: DEBUG nova.network.neutron [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Refreshing network info cache for port 343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1212.334537] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a18a99-b20a-45b0-96b4-afdc3a2c6cec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.343476] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d073b84-4f79-452e-ba6b-7d2b08ff54cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.375569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66aa6831-df27-4beb-b043-41d0eacb2a30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.383724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a414ad-f095-4ccd-95ef-51812239e704 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.396750] env[68233]: DEBUG nova.compute.provider_tree [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1212.866674] env[68233]: DEBUG nova.network.neutron [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updated VIF entry in instance network info cache for port 343ba5d0-d87f-4796-a86c-5a2922804c78. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.867337] env[68233]: DEBUG nova.network.neutron [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.918637] env[68233]: ERROR nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [req-da3bfa3d-6919-4756-b3c2-4ac9f9f0bbe0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-da3bfa3d-6919-4756-b3c2-4ac9f9f0bbe0"}]} [ 1212.937415] env[68233]: DEBUG nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1212.952332] env[68233]: DEBUG nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1212.952565] env[68233]: DEBUG nova.compute.provider_tree [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1212.965466] env[68233]: DEBUG nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1212.987202] env[68233]: DEBUG nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1213.103488] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4eb5ae-3bc4-4e8d-ab82-c26cf2665602 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.111874] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66898c66-77d0-4ff1-ba38-f00b888d28b6 
{{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.142448] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b736da9-4924-45ff-9938-9bdbbcf45088 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.150481] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30703dce-7887-44cd-ba09-951ff4658482 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.165150] env[68233]: DEBUG nova.compute.provider_tree [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.369930] env[68233]: DEBUG oslo_concurrency.lockutils [req-b959845b-1c9f-4977-a4cd-0a81c96cd661 req-c5a318b2-0ed0-468b-a9ab-16a712ad3916 service nova] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1213.697814] env[68233]: DEBUG nova.scheduler.client.report [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 170 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1213.698094] env[68233]: DEBUG nova.compute.provider_tree [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 170 to 171 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1213.698284] env[68233]: DEBUG nova.compute.provider_tree [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1213.947739] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1213.948828] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc8a039-2a2a-40f7-af73-020109fc76d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.955811] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1213.956038] env[68233]: ERROR oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk due to incomplete transfer. [ 1213.956300] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-cb7551be-1a65-4aa6-b735-fbd03e15cf43 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.963873] env[68233]: DEBUG oslo_vmware.rw_handles [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5234d6ac-2690-76de-2c9a-21a6c39b167f/disk-0.vmdk. 
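A few entries above, placement rejected the inventory update with a 409 whose code is placement.concurrent_update; nova recovered by refreshing the provider's inventories, aggregates and traits and retrying, after which the provider generation moved from 170 to 171. That is the standard generation-conflict loop. A minimal sketch of it against the placement REST API; the endpoint path and the resource_provider_generation field follow the placement API, while the token handling, microversion, retry count and function name are illustrative:

import requests


def set_inventory(base_url, token, rp_uuid, inventories, max_retries=3):
    headers = {'x-auth-token': token,
               'openstack-api-version': 'placement 1.26'}
    url = '%s/resource_providers/%s/inventories' % (base_url, rp_uuid)
    for _ in range(max_retries):
        # Re-read the provider so the PUT carries the current generation.
        current = requests.get(url, headers=headers).json()
        body = {'resource_provider_generation':
                    current['resource_provider_generation'],
                'inventories': inventories}
        resp = requests.put(url, json=body, headers=headers)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation first; loop, refresh and try again.
    raise RuntimeError('inventory update kept conflicting for %s' % rp_uuid)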
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1213.964875] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Uploaded image 267e83e8-84b0-4301-9a90-8a1f48a5e360 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1213.966513] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1213.966767] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2e75b3c0-39f8-4ed6-98e2-4c66889b8554 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.973664] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1213.973664] env[68233]: value = "task-2783294" [ 1213.973664] env[68233]: _type = "Task" [ 1213.973664] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.982029] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783294, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.202963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.011s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.203515] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1214.206366] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.266s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.253418] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1214.253662] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559540', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'name': 'volume-ed873c31-cc71-4640-9c21-d65eb646b508', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aadc7dbe-456c-4bf3-b26d-bac672459fb9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'serial': 'ed873c31-cc71-4640-9c21-d65eb646b508'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1214.254649] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81265057-1bc8-4b64-868e-db2b8a9732c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.272014] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bf61af-8e4e-4100-b3d9-a24cbf35abc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.296515] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] volume-ed873c31-cc71-4640-9c21-d65eb646b508/volume-ed873c31-cc71-4640-9c21-d65eb646b508.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1214.296768] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f74609d-66d0-41c5-810a-90ac0c31ced6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.314282] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1214.314282] env[68233]: value = "task-2783295" [ 1214.314282] env[68233]: _type = "Task" [ 1214.314282] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.321950] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783295, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.483646] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783294, 'name': Destroy_Task, 'duration_secs': 0.3601} completed successfully. 
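The _attach_volume_vmdk / "Attached VMDK" entries in this stretch log the Cinder connection_info that the VMware volume code consumes. A minimal sketch of the handful of fields the attach path actually uses, with the dict abbreviated from the entry above and the helper name illustrative:

def summarize_vmdk_connection(connection_info):
    # Pick out the fields the vmdk attach path needs from connection_info.
    assert connection_info['driver_volume_type'] == 'vmdk'
    data = connection_info['data']
    return {
        'backing_vm': data['volume'],   # moref of the volume's backing VM, e.g. 'vm-559540'
        'volume_id': data['volume_id'],
        'vmdk_name': data['name'],      # 'volume-<volume_id>'
        'read_only': data['access_mode'] != 'rw',
    }


# Abbreviated from the log entry above:
info = {'driver_volume_type': 'vmdk',
        'data': {'volume': 'vm-559540',
                 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508',
                 'name': 'volume-ed873c31-cc71-4640-9c21-d65eb646b508',
                 'access_mode': 'rw'}}
print(summarize_vmdk_connection(info))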
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.483992] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Destroyed the VM [ 1214.484104] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1214.484349] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f75d9bdf-0142-491a-aca2-6cf78755e580 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.491625] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1214.491625] env[68233]: value = "task-2783296" [ 1214.491625] env[68233]: _type = "Task" [ 1214.491625] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.499103] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783296, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.709724] env[68233]: DEBUG nova.compute.utils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1214.712951] env[68233]: INFO nova.compute.claims [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1214.717124] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1214.717124] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1214.765386] env[68233]: DEBUG nova.policy [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '95122ece8b8b445aa04349a675f262b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dbc7604c87d6485097fe5658d68217b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1214.824808] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783295, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.005925] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783296, 'name': RemoveSnapshot_Task, 'duration_secs': 0.395386} completed successfully. 
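The nova.policy entry above shows the request context (a plain member/reader token, is_admin False) failing the network:attach_external_network rule during port allocation, so the instance is wired to its tenant network without external-network access. The check itself is plain oslo.policy. A minimal sketch; the check string is an assumption standing in for nova's admin-only default, and the credentials are abbreviated from the log entry:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Stand-in for nova's admin-only default; the exact check string is assumed.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'user_id': '95122ece8b8b445aa04349a675f262b8',
         'project_id': 'dbc7604c87d6485097fe5658d68217b9'}
# Returns False for these credentials; the DEBUG entry above is nova logging
# exactly this failed check before it continues building the port.
print(enforcer.enforce('network:attach_external_network', {}, creds))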
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.006248] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1215.006546] env[68233]: DEBUG nova.compute.manager [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1215.007806] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12ed05e-727c-437a-84d2-f7c227e5757d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.131195] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Successfully created port: e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1215.217768] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1215.222453] env[68233]: INFO nova.compute.resource_tracker [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating resource usage from migration 95577185-409f-424f-929a-13e5d6c05d91 [ 1215.328021] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783295, 'name': ReconfigVM_Task, 'duration_secs': 0.700251} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.328021] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfigured VM instance instance-00000075 to attach disk [datastore2] volume-ed873c31-cc71-4640-9c21-d65eb646b508/volume-ed873c31-cc71-4640-9c21-d65eb646b508.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1215.331642] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d1319e0-5570-4b1b-b467-92dddba1d30e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.351015] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1215.351015] env[68233]: value = "task-2783297" [ 1215.351015] env[68233]: _type = "Task" [ 1215.351015] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.353579] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b512155-1ee1-478e-a3d6-58a56ecc8261 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.362723] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783297, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.363819] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97944387-12d6-4b88-bb78-1d4af8095cd9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.397113] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254f3abc-aa08-4c9b-b341-5a107901c57a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.404451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e0d1bc-3fac-4832-ab06-4f2b73688d54 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.417353] env[68233]: DEBUG nova.compute.provider_tree [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.519856] env[68233]: INFO nova.compute.manager [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Shelve offloading [ 1215.858359] env[68233]: DEBUG oslo_vmware.api [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783297, 'name': ReconfigVM_Task, 'duration_secs': 0.273512} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.858606] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559540', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'name': 'volume-ed873c31-cc71-4640-9c21-d65eb646b508', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aadc7dbe-456c-4bf3-b26d-bac672459fb9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'serial': 'ed873c31-cc71-4640-9c21-d65eb646b508'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1215.920308] env[68233]: DEBUG nova.scheduler.client.report [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1216.023385] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1216.023721] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea93f2c7-7bb0-4cf6-89ef-cbac06aa7650 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.031249] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1216.031249] env[68233]: value = "task-2783298" [ 1216.031249] env[68233]: _type = "Task" [ 1216.031249] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.038783] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783298, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.228813] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1216.255174] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1216.255443] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1216.255610] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1216.255797] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1216.255963] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1216.256130] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1216.256344] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1216.256507] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1216.256677] env[68233]: DEBUG nova.virt.hardware [None 
req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1216.256840] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1216.257038] env[68233]: DEBUG nova.virt.hardware [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1216.257896] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e450d410-e502-4e22-89de-df0e079b61a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.265711] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdb8805-c07b-4f18-b86a-769d3246717f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.425166] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.219s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.425404] env[68233]: INFO nova.compute.manager [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Migrating [ 1216.540035] env[68233]: DEBUG nova.compute.manager [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Received event network-vif-plugged-e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1216.540302] env[68233]: DEBUG oslo_concurrency.lockutils [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] Acquiring lock "f24af50e-90cd-4398-84d1-a1e1849d01d6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.540539] env[68233]: DEBUG oslo_concurrency.lockutils [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.540686] env[68233]: DEBUG oslo_concurrency.lockutils [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] Lock 
"f24af50e-90cd-4398-84d1-a1e1849d01d6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.540841] env[68233]: DEBUG nova.compute.manager [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] No waiting events found dispatching network-vif-plugged-e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1216.541026] env[68233]: WARNING nova.compute.manager [req-50937f78-9f58-4430-8c09-7cacab94ca7b req-f2796008-5305-4ca6-ad1f-6d06e007cc02 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Received unexpected event network-vif-plugged-e303f634-41bf-4e57-9c9a-6555e22b32bc for instance with vm_state building and task_state spawning. [ 1216.547990] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1216.548242] env[68233]: DEBUG nova.compute.manager [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1216.549319] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58cc0e4-f909-4490-8d48-1554d3639b50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.555690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.555916] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.556052] env[68233]: DEBUG nova.network.neutron [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1216.631877] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Successfully updated port: e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1216.898628] env[68233]: DEBUG 
nova.objects.instance [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid aadc7dbe-456c-4bf3-b26d-bac672459fb9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1216.942121] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1216.942302] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1216.942479] env[68233]: DEBUG nova.network.neutron [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.134848] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.135110] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.135235] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1217.405577] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e23835c2-e60b-420f-850c-e78d487622b6 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.259s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.475562] env[68233]: DEBUG nova.network.neutron [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": 
"tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.644747] env[68233]: DEBUG nova.network.neutron [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.677054] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1217.980355] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.105199] env[68233]: DEBUG nova.network.neutron [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.147876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.199417] env[68233]: DEBUG nova.compute.manager [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-vif-unplugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1218.199417] env[68233]: DEBUG oslo_concurrency.lockutils [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1218.199709] env[68233]: DEBUG oslo_concurrency.lockutils [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.199774] env[68233]: DEBUG oslo_concurrency.lockutils [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.200092] env[68233]: DEBUG nova.compute.manager [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] No waiting events found dispatching network-vif-unplugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1218.200196] env[68233]: WARNING nova.compute.manager [req-61890205-3249-4a33-8f43-55b969a79c23 req-9c660ece-b0ed-4245-937d-474d219d1871 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received unexpected event network-vif-unplugged-b4077afe-a7b6-4653-be23-4c735d67fa05 for instance with vm_state shelved and task_state shelving_offloading. [ 1218.299604] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.300558] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c38012-6796-44de-99c7-63d59c9601e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.308401] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1218.308649] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df6d65eb-4d08-4234-ba74-62df183dbbf2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.377611] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1218.377840] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1218.378043] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687 
{{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.378309] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-138bfbdc-3cca-4809-9550-080873a6a3ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.388319] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1218.388319] env[68233]: value = "task-2783300" [ 1218.388319] env[68233]: _type = "Task" [ 1218.388319] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.397536] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783300, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.594017] env[68233]: DEBUG nova.compute.manager [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Received event network-changed-e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1218.594275] env[68233]: DEBUG nova.compute.manager [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Refreshing instance network info cache due to event network-changed-e303f634-41bf-4e57-9c9a-6555e22b32bc. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1218.594571] env[68233]: DEBUG oslo_concurrency.lockutils [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] Acquiring lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1218.607195] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1218.607547] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Instance network_info: |[{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1218.607790] env[68233]: DEBUG oslo_concurrency.lockutils [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] Acquired lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1218.608067] env[68233]: DEBUG nova.network.neutron [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Refreshing network info cache for port e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1218.609263] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:ff:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e303f634-41bf-4e57-9c9a-6555e22b32bc', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1218.616714] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1218.621034] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1218.621472] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c946cc0c-4aec-4fe6-8f65-cd8fe0f3db5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.641087] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1218.641087] env[68233]: value = "task-2783301" [ 1218.641087] env[68233]: _type = "Task" [ 1218.641087] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.648886] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783301, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.816210] env[68233]: DEBUG nova.network.neutron [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updated VIF entry in instance network info cache for port e303f634-41bf-4e57-9c9a-6555e22b32bc. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1218.816605] env[68233]: DEBUG nova.network.neutron [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.898255] env[68233]: DEBUG oslo_vmware.api [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783300, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138689} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.898513] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1218.898707] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1218.898888] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1218.915192] env[68233]: INFO nova.scheduler.client.report [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted allocations for instance 863e15c6-caa4-47aa-902a-7be2c9538687 [ 1219.121627] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.121876] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.150591] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783301, 'name': CreateVM_Task, 'duration_secs': 0.328678} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.150745] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1219.151391] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.151561] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.151878] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1219.152139] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d985ab-93cb-4bc9-a42e-9aafdfef4fad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.158633] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1219.158633] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ab9bde-4fbc-b072-8803-7d193fe946f8" [ 1219.158633] env[68233]: _type = "Task" [ 1219.158633] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.166630] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ab9bde-4fbc-b072-8803-7d193fe946f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.319865] env[68233]: DEBUG oslo_concurrency.lockutils [req-75ad122a-5d28-4ce6-b9ae-b81299488725 req-5b4f5f54-7642-4600-b43c-671f1ab35d37 service nova] Releasing lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.419938] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.420265] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.420494] env[68233]: DEBUG nova.objects.instance [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'resources' on Instance uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.625909] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1219.666435] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3a8587-e2bc-4174-b614-abf88432b037 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.675365] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ab9bde-4fbc-b072-8803-7d193fe946f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009731} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.690423] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1219.690657] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1219.690916] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1219.691075] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1219.691258] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1219.691821] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1219.694896] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63907fca-d9a6-41a5-8cab-6be3ee74ae7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.703167] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1219.703349] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1219.704046] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a0f51b2-7d9a-44b7-81ff-3f053e8788e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.708875] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1219.708875] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f6293-bc15-fb68-cfbd-f39c83096946" [ 1219.708875] env[68233]: _type = "Task" [ 1219.708875] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1219.717198] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f6293-bc15-fb68-cfbd-f39c83096946, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.923639] env[68233]: DEBUG nova.objects.instance [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'numa_topology' on Instance uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1220.146990] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.199350] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.199739] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c26c596f-f69a-44fe-980e-4fd1d6985666 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.207155] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1220.207155] env[68233]: value = "task-2783302" [ 1220.207155] env[68233]: _type = "Task" [ 1220.207155] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.219257] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526f6293-bc15-fb68-cfbd-f39c83096946, 'name': SearchDatastore_Task, 'duration_secs': 0.010086} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.223106] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.223654] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd861ed3-155b-4274-a1fd-b0fa28991dc2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.227691] env[68233]: DEBUG nova.compute.manager [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1220.227902] env[68233]: DEBUG nova.compute.manager [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing instance network info cache due to event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1220.228139] env[68233]: DEBUG oslo_concurrency.lockutils [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.228308] env[68233]: DEBUG oslo_concurrency.lockutils [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.228521] env[68233]: DEBUG nova.network.neutron [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1220.233651] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1220.233651] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c93062-01bd-9084-089b-2e8b760cd718" [ 1220.233651] env[68233]: _type = "Task" [ 1220.233651] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.241519] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c93062-01bd-9084-089b-2e8b760cd718, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.426505] env[68233]: DEBUG nova.objects.base [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Object Instance<863e15c6-caa4-47aa-902a-7be2c9538687> lazy-loaded attributes: resources,numa_topology {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1220.529585] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d8dbb7-1078-4099-980a-4f37f66ef4da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.537323] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18082fb5-b482-4f2d-af82-bad769a7d00f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.570026] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e4063c-f43c-4bab-bd78-fc671ad8ef91 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.576566] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c19d631-7a32-4aac-ac46-e656d673ea1f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.589965] env[68233]: DEBUG nova.compute.provider_tree [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.717470] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783302, 'name': PowerOffVM_Task, 'duration_secs': 0.158958} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.717822] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1220.718100] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1220.743441] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c93062-01bd-9084-089b-2e8b760cd718, 'name': SearchDatastore_Task, 'duration_secs': 0.010372} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.743778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1220.744041] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f24af50e-90cd-4398-84d1-a1e1849d01d6/f24af50e-90cd-4398-84d1-a1e1849d01d6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1220.744332] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e50f6db-8865-469f-a62e-49024eed760c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.752030] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1220.752030] env[68233]: value = "task-2783303" [ 1220.752030] env[68233]: _type = "Task" [ 1220.752030] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.760645] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783303, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.970390] env[68233]: DEBUG nova.network.neutron [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updated VIF entry in instance network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1220.970820] env[68233]: DEBUG nova.network.neutron [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb4077afe-a7", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.093840] env[68233]: DEBUG nova.scheduler.client.report [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1221.225223] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.225517] env[68233]: DEBUG nova.virt.hardware [None 
req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.225680] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.225870] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.226037] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.226191] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.226395] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.226556] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.226727] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.226894] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.227514] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.232015] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11a879b8-6ef5-45a6-96c4-1c4dfbc7271f {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.252772] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1221.252772] env[68233]: value = "task-2783304" [ 1221.252772] env[68233]: _type = "Task" [ 1221.252772] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.265309] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783304, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.268344] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.427372} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.268580] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] f24af50e-90cd-4398-84d1-a1e1849d01d6/f24af50e-90cd-4398-84d1-a1e1849d01d6.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1221.268791] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1221.269285] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b56e5c82-cdcd-4e59-bf4f-4b2f8201119e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.275708] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1221.275708] env[68233]: value = "task-2783305" [ 1221.275708] env[68233]: _type = "Task" [ 1221.275708] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.283989] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783305, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1221.473618] env[68233]: DEBUG oslo_concurrency.lockutils [req-f54e3bf4-db5c-47f9-bc0f-43ad936bb529 req-6c73bbc3-8494-4752-a986-1e5be14c409a service nova] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1221.598805] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.178s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.601654] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.455s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.603375] env[68233]: INFO nova.compute.claims [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1221.749157] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.765690] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783304, 'name': ReconfigVM_Task, 'duration_secs': 0.164831} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.766111] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1221.785086] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783305, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062407} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.785437] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1221.786250] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf91953-52bd-4882-965d-f30079f94024 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.808276] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] f24af50e-90cd-4398-84d1-a1e1849d01d6/f24af50e-90cd-4398-84d1-a1e1849d01d6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1221.809227] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1116ed53-51b6-432a-abb1-1d56cccc5c27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.828856] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1221.828856] env[68233]: value = "task-2783306" [ 1221.828856] env[68233]: _type = "Task" [ 1221.828856] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1221.837283] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783306, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.113808] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ed3e1c48-f5cb-42b3-b8ab-98aa8d435aed tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.728s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.114831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.366s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.115244] env[68233]: INFO nova.compute.manager [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Unshelving [ 1222.275211] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1222.275544] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1222.275722] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1222.275939] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1222.276107] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1222.276261] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1222.276469] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1222.276632] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1222.276803] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1222.276965] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1222.277162] env[68233]: DEBUG nova.virt.hardware [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1222.282442] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1222.283084] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6655db21-fa26-4dd6-a8fa-d54a90da30a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.302028] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1222.302028] env[68233]: value = "task-2783307" [ 1222.302028] env[68233]: _type = "Task" [ 1222.302028] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.310143] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783307, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.337986] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783306, 'name': ReconfigVM_Task, 'duration_secs': 0.334046} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.338292] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Reconfigured VM instance instance-00000079 to attach disk [datastore2] f24af50e-90cd-4398-84d1-a1e1849d01d6/f24af50e-90cd-4398-84d1-a1e1849d01d6.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1222.338944] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17c730f0-3c22-4df1-85e3-fa26386d535e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.345126] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1222.345126] env[68233]: value = "task-2783308" [ 1222.345126] env[68233]: _type = "Task" [ 1222.345126] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.352182] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783308, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.721724] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a2d5f2-e8a5-4065-a462-0266130edc68 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.729540] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843d7a0a-99d4-4300-8735-541a10ea335e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.760363] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7000eb-4525-426c-b5ce-631d32bc4cae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.767354] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517434e7-5a25-444f-a165-bc5cd3eefb73 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.779993] env[68233]: DEBUG nova.compute.provider_tree [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1222.814461] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783307, 'name': ReconfigVM_Task, 'duration_secs': 0.169322} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.814914] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1222.815956] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e9e952-d5f0-41f6-a1c9-cfdc53380c57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.837162] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] volume-078fe37a-d525-4476-a117-3378bb38267b/volume-078fe37a-d525-4476-a117-3378bb38267b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1222.837401] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7fc077f-2b59-495a-a341-7403af8004b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.857506] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783308, 'name': Rename_Task, 'duration_secs': 0.157333} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.858604] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1222.858899] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1222.858899] env[68233]: value = "task-2783309" [ 1222.858899] env[68233]: _type = "Task" [ 1222.858899] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.859095] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f168e17-8c61-462b-9004-0bd73e6f7f9b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.868074] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783309, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.869220] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1222.869220] env[68233]: value = "task-2783310" [ 1222.869220] env[68233]: _type = "Task" [ 1222.869220] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.875957] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783310, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.140025] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.283279] env[68233]: DEBUG nova.scheduler.client.report [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1223.369484] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783309, 'name': ReconfigVM_Task, 'duration_secs': 0.263899} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.369823] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfigured VM instance instance-00000078 to attach disk [datastore2] volume-078fe37a-d525-4476-a117-3378bb38267b/volume-078fe37a-d525-4476-a117-3378bb38267b.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1223.370083] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1223.381447] env[68233]: DEBUG oslo_vmware.api [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783310, 'name': PowerOnVM_Task, 'duration_secs': 0.421069} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.381447] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1223.381447] env[68233]: INFO nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Took 7.15 seconds to spawn the instance on the hypervisor. [ 1223.381447] env[68233]: DEBUG nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1223.382023] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc1b4c5-edac-4dd5-86aa-9c0bb67e4835 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.788874] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1223.789450] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1223.792127] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.652s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.792344] env[68233]: DEBUG nova.objects.instance [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'pci_requests' on Instance uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.878955] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad92eb37-9ef0-458e-bfd6-9089371c199a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.901969] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2cd065-303b-4451-a449-6b68cb896571 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.907249] env[68233]: INFO nova.compute.manager [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Took 12.74 seconds to build instance. [ 1223.921232] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1224.295035] env[68233]: DEBUG nova.compute.utils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1224.297779] env[68233]: DEBUG nova.objects.instance [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'numa_topology' on Instance uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1224.298884] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1224.299045] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1224.343585] env[68233]: DEBUG nova.policy [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5879d5d831004ae3b4273284da66358d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd564a0ed01a84ffca782d1344faba070', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1224.408796] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c3f130f5-f550-4e06-abd3-294a1c7dd810 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.247s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.552992] env[68233]: DEBUG nova.compute.manager [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Received event network-changed-e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1224.553266] env[68233]: DEBUG nova.compute.manager [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Refreshing instance network info cache due to event network-changed-e303f634-41bf-4e57-9c9a-6555e22b32bc. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1224.553488] env[68233]: DEBUG oslo_concurrency.lockutils [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] Acquiring lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.553748] env[68233]: DEBUG oslo_concurrency.lockutils [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] Acquired lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.553790] env[68233]: DEBUG nova.network.neutron [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Refreshing network info cache for port e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.628372] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Successfully created port: e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1224.799832] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1224.802708] env[68233]: INFO nova.compute.claims [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1225.354777] env[68233]: DEBUG nova.network.neutron [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updated VIF entry in instance network info cache for port e303f634-41bf-4e57-9c9a-6555e22b32bc. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1225.355216] env[68233]: DEBUG nova.network.neutron [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1225.562707] env[68233]: DEBUG nova.network.neutron [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Port 343ba5d0-d87f-4796-a86c-5a2922804c78 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1225.812214] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1225.840831] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1225.841105] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1225.841303] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1225.841582] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1225.841770] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1225.841925] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1225.842185] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1225.842389] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1225.842559] env[68233]: DEBUG 
nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1225.842750] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1225.842941] env[68233]: DEBUG nova.virt.hardware [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1225.843872] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea5446f-3db2-4dab-855a-07f35bf4446b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.854334] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31fcdd27-0ad3-46e8-8bdf-0aea720d708e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.858848] env[68233]: DEBUG oslo_concurrency.lockutils [req-4cfe0dca-49c9-4ab1-b1e1-81a2edb717bf req-6b43773d-3081-40c1-a104-019932b08aea service nova] Releasing lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.953960] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb63c12-11fa-4f94-bc88-0cc3fd22372e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.962068] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e454ef0c-aa85-4eb0-9442-df403570cc82 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.998290] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d49def-3040-4488-87ad-89f394574c0e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.001788] env[68233]: DEBUG nova.compute.manager [req-da99bd87-f374-446e-a364-a42ad5e89a7c req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Received event network-vif-plugged-e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1226.002073] env[68233]: DEBUG oslo_concurrency.lockutils [req-da99bd87-f374-446e-a364-a42ad5e89a7c req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] Acquiring lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.002333] env[68233]: DEBUG oslo_concurrency.lockutils [req-da99bd87-f374-446e-a364-a42ad5e89a7c 
req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.002668] env[68233]: DEBUG oslo_concurrency.lockutils [req-da99bd87-f374-446e-a364-a42ad5e89a7c req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.002920] env[68233]: DEBUG nova.compute.manager [req-da99bd87-f374-446e-a364-a42ad5e89a7c req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] No waiting events found dispatching network-vif-plugged-e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1226.003166] env[68233]: WARNING nova.compute.manager [req-da99bd87-f374-446e-a364-a42ad5e89a7c req-8e40ee24-e9c9-41d5-8fba-ecc9ec16278d service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Received unexpected event network-vif-plugged-e8159e73-4c22-4b53-8100-11a2cbfb9853 for instance with vm_state building and task_state spawning. [ 1226.009035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c948108-3f48-49c4-ab4b-582b98c0cb1c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.024179] env[68233]: DEBUG nova.compute.provider_tree [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.071572] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Successfully updated port: e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1226.527763] env[68233]: DEBUG nova.scheduler.client.report [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.578284] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1226.578469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1226.578607] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1226.582867] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.583000] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.583219] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.033948] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.242s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.062241] env[68233]: INFO nova.network.neutron [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating port b4077afe-a7b6-4653-be23-4c735d67fa05 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1227.108068] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1227.232637] env[68233]: DEBUG nova.network.neutron [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updating instance_info_cache with network_info: [{"id": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "address": "fa:16:3e:4b:8d:00", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8159e73-4c", "ovs_interfaceid": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1227.647529] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.648096] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.648096] env[68233]: DEBUG nova.network.neutron [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1227.735693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.736071] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Instance network_info: |[{"id": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "address": "fa:16:3e:4b:8d:00", "network": 
{"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8159e73-4c", "ovs_interfaceid": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1227.736532] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:8d:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e8159e73-4c22-4b53-8100-11a2cbfb9853', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1227.743988] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1227.744145] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1227.744377] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-870c68a1-2508-410b-bd3c-18c32fa1e00e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.764360] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1227.764360] env[68233]: value = "task-2783311" [ 1227.764360] env[68233]: _type = "Task" [ 1227.764360] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1227.771735] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.018819] env[68233]: DEBUG nova.compute.manager [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Received event network-changed-e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1228.018819] env[68233]: DEBUG nova.compute.manager [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Refreshing instance network info cache due to event network-changed-e8159e73-4c22-4b53-8100-11a2cbfb9853. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1228.018819] env[68233]: DEBUG oslo_concurrency.lockutils [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] Acquiring lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.018819] env[68233]: DEBUG oslo_concurrency.lockutils [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] Acquired lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.019275] env[68233]: DEBUG nova.network.neutron [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Refreshing network info cache for port e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1228.274048] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783311, 'name': CreateVM_Task, 'duration_secs': 0.36472} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.274394] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1228.274845] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.275091] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.275418] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1228.275666] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a093ba2d-6a83-4bef-ac4c-6469aae2ba28 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.280025] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1228.280025] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ab95b3-aef5-3134-0129-379a2cda5551" [ 1228.280025] env[68233]: _type = "Task" [ 1228.280025] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.290990] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ab95b3-aef5-3134-0129-379a2cda5551, 'name': SearchDatastore_Task, 'duration_secs': 0.008826} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.291254] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.291473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1228.291693] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.291839] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.292024] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1228.292259] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd1a4145-dfc3-4a6d-b481-a36139702666 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.298719] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1228.298893] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1228.299557] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0ba9df-b24a-4702-a3c2-82d2e584c07b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.304219] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1228.304219] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224f5b7-ecff-3f6f-8d5a-9163dbd9535d" [ 1228.304219] env[68233]: _type = "Task" [ 1228.304219] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.311451] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224f5b7-ecff-3f6f-8d5a-9163dbd9535d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.344495] env[68233]: DEBUG nova.network.neutron [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.499816] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1228.499958] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 
tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1228.500095] env[68233]: DEBUG nova.network.neutron [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1228.691098] env[68233]: DEBUG nova.network.neutron [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updated VIF entry in instance network info cache for port e8159e73-4c22-4b53-8100-11a2cbfb9853. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.691468] env[68233]: DEBUG nova.network.neutron [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updating instance_info_cache with network_info: [{"id": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "address": "fa:16:3e:4b:8d:00", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8159e73-4c", "ovs_interfaceid": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.815037] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5224f5b7-ecff-3f6f-8d5a-9163dbd9535d, 'name': SearchDatastore_Task, 'duration_secs': 0.00769} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1228.815913] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09eabcac-f8f8-4528-b101-e69fcf5c0ae2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.821678] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1228.821678] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bb81fd-43c2-d2fe-1378-c5b87c8cea1f" [ 1228.821678] env[68233]: _type = "Task" [ 1228.821678] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1228.829277] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bb81fd-43c2-d2fe-1378-c5b87c8cea1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1228.847272] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.194720] env[68233]: DEBUG oslo_concurrency.lockutils [req-83383f51-1ab3-46b5-aa55-0448777601e8 req-1c96532c-f38e-4fb9-806b-ced31c595a7e service nova] Releasing lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.196336] env[68233]: DEBUG nova.network.neutron [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1229.332740] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52bb81fd-43c2-d2fe-1378-c5b87c8cea1f, 'name': SearchDatastore_Task, 'duration_secs': 0.009206} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.333130] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.333203] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 916774a9-bfd3-4931-bc3a-1d50471a1c40/916774a9-bfd3-4931-bc3a-1d50471a1c40.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1229.333453] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4af2a7d2-a15b-474d-8baa-6ffdd199c043 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.339680] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1229.339680] env[68233]: value = "task-2783312" [ 1229.339680] env[68233]: _type = "Task" [ 1229.339680] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.346822] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783312, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.356214] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e2d114-4097-41b8-a882-e5009f2a916b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.362024] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e67ced-c208-44df-a856-01fa49faf901 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.699183] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.728598] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='6d413c8e9ddb5a247378beb61f576da5',container_format='bare',created_at=2025-03-06T03:59:54Z,direct_url=,disk_format='vmdk',id=267e83e8-84b0-4301-9a90-8a1f48a5e360,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1427414969-shelved',owner='9df7c30630584a2bb79e798dcc571850',properties=ImageMetaProps,protected=,size=31668224,status='active',tags=,updated_at=2025-03-06T04:00:08Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1229.728855] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1229.729034] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1229.730031] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1229.730031] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1229.730031] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 
tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1229.730031] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1229.730301] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1229.730346] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1229.730490] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1229.730672] env[68233]: DEBUG nova.virt.hardware [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1229.731612] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5c0ad7-b043-4049-a67f-1a075aae937a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.740142] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da21a23-af2f-4403-b22a-6db0fca2d3a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.753902] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:31:a2:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4077afe-a7b6-4653-be23-4c735d67fa05', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1229.761340] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1229.761593] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1229.761816] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-914f368a-d014-4027-b728-eabff7b6b151 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.779465] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1229.779465] env[68233]: value = "task-2783313" [ 1229.779465] env[68233]: _type = "Task" [ 1229.779465] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.790000] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783313, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1229.848998] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783312, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431986} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1229.849318] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 916774a9-bfd3-4931-bc3a-1d50471a1c40/916774a9-bfd3-4931-bc3a-1d50471a1c40.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1229.849539] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1229.849789] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5a913a9-9959-4ed3-a24a-99c6437406a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.855325] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1229.855325] env[68233]: value = "task-2783314" [ 1229.855325] env[68233]: _type = "Task" [ 1229.855325] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1229.862589] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783314, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.044902] env[68233]: DEBUG nova.compute.manager [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1230.045574] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.045811] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1230.045971] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.046171] env[68233]: DEBUG nova.compute.manager [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] No waiting events found dispatching network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1230.046340] env[68233]: WARNING nova.compute.manager [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received unexpected event network-vif-plugged-b4077afe-a7b6-4653-be23-4c735d67fa05 for instance with vm_state shelved_offloaded and task_state spawning. [ 1230.046514] env[68233]: DEBUG nova.compute.manager [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1230.046666] env[68233]: DEBUG nova.compute.manager [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing instance network info cache due to event network-changed-b4077afe-a7b6-4653-be23-4c735d67fa05. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1230.046871] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Acquiring lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.047018] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Acquired lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.047191] env[68233]: DEBUG nova.network.neutron [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Refreshing network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1230.289592] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783313, 'name': CreateVM_Task, 'duration_secs': 0.354213} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.289786] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1230.290456] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.290623] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.291014] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1230.291279] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c7c9e1f-ea19-4619-9635-e750a8d5774e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.295659] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1230.295659] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b660ad-9de2-8f98-7b46-fc0a39a61224" [ 1230.295659] env[68233]: _type = "Task" [ 1230.295659] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.303974] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52b660ad-9de2-8f98-7b46-fc0a39a61224, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.364025] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783314, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065905} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.364334] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1230.365146] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c905ddb-2d78-4049-b784-ffbeef3bf77a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.386921] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] 916774a9-bfd3-4931-bc3a-1d50471a1c40/916774a9-bfd3-4931-bc3a-1d50471a1c40.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1230.387162] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5286083e-f51a-4b3c-bf74-67396c5a35f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.405451] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1230.405451] env[68233]: value = "task-2783315" [ 1230.405451] env[68233]: _type = "Task" [ 1230.405451] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.412557] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783315, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.459031] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c7b0ed-9674-4592-ad29-104ebfa6c9f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.477406] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63dc914a-a996-47d1-9bbc-dc031b261f71 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.484048] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1230.774629] env[68233]: DEBUG nova.network.neutron [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updated VIF entry in instance network info cache for port b4077afe-a7b6-4653-be23-4c735d67fa05. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1230.775050] env[68233]: DEBUG nova.network.neutron [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [{"id": "b4077afe-a7b6-4653-be23-4c735d67fa05", "address": "fa:16:3e:31:a2:0a", "network": {"id": "659aff47-e460-425b-8211-10c04597b4c7", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1564058774-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9df7c30630584a2bb79e798dcc571850", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4077afe-a7", "ovs_interfaceid": "b4077afe-a7b6-4653-be23-4c735d67fa05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.805103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.805355] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None 
req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Processing image 267e83e8-84b0-4301-9a90-8a1f48a5e360 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1230.805589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1230.805740] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquired lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1230.805921] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1230.806191] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-664e9ae6-5e29-4210-aabc-113b4171743f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.831928] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1230.832129] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1230.832837] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62c18545-2677-4d60-8b35-f84430057bea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.838071] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1230.838071] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52156711-04e4-6161-a5ef-70fb1dbb09e9" [ 1230.838071] env[68233]: _type = "Task" [ 1230.838071] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.845474] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52156711-04e4-6161-a5ef-70fb1dbb09e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.915147] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783315, 'name': ReconfigVM_Task, 'duration_secs': 0.273423} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1230.915401] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Reconfigured VM instance instance-0000007a to attach disk [datastore2] 916774a9-bfd3-4931-bc3a-1d50471a1c40/916774a9-bfd3-4931-bc3a-1d50471a1c40.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1230.916027] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0449a8cd-674f-478e-9f20-71d21d91583a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.922225] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1230.922225] env[68233]: value = "task-2783316" [ 1230.922225] env[68233]: _type = "Task" [ 1230.922225] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.929583] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783316, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1230.990790] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1230.991234] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58cc5c53-563e-450e-b94c-592c210ef5c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.997652] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1230.997652] env[68233]: value = "task-2783317" [ 1230.997652] env[68233]: _type = "Task" [ 1230.997652] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.007406] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783317, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.278311] env[68233]: DEBUG oslo_concurrency.lockutils [req-974792a2-4de7-470f-a2ab-45cccab27ed5 req-60a6a6d6-095b-49f7-b1ac-08c25e2453d5 service nova] Releasing lock "refresh_cache-863e15c6-caa4-47aa-902a-7be2c9538687" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.350053] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1231.350513] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Fetch image to [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15/OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1231.350796] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Downloading stream optimized image 267e83e8-84b0-4301-9a90-8a1f48a5e360 to [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15/OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15.vmdk on the data store datastore2 as vApp {{(pid=68233) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1231.351076] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 
863e15c6-caa4-47aa-902a-7be2c9538687] Downloading image file data 267e83e8-84b0-4301-9a90-8a1f48a5e360 to the ESX as VM named 'OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15' {{(pid=68233) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1231.420218] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1231.420218] env[68233]: value = "resgroup-9" [ 1231.420218] env[68233]: _type = "ResourcePool" [ 1231.420218] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1231.420743] env[68233]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-9a14d167-42f5-4a02-b20d-936779a28045 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.446480] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783316, 'name': Rename_Task, 'duration_secs': 0.19464} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.448722] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1231.448722] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease: (returnval){ [ 1231.448722] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1231.448722] env[68233]: _type = "HttpNfcLease" [ 1231.448722] env[68233]: } obtained for vApp import into resource pool (val){ [ 1231.448722] env[68233]: value = "resgroup-9" [ 1231.448722] env[68233]: _type = "ResourcePool" [ 1231.448722] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1231.448722] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the lease: (returnval){ [ 1231.448722] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1231.448722] env[68233]: _type = "HttpNfcLease" [ 1231.448722] env[68233]: } to be ready. 
{{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1231.448722] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81b4c3ae-3610-4dfc-bd27-bfedb3725ceb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.457506] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1231.457506] env[68233]: value = "task-2783319" [ 1231.457506] env[68233]: _type = "Task" [ 1231.457506] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.457784] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1231.457784] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1231.457784] env[68233]: _type = "HttpNfcLease" [ 1231.457784] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1231.464723] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783319, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.506849] env[68233]: DEBUG oslo_vmware.api [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783317, 'name': PowerOnVM_Task, 'duration_secs': 0.450925} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.507175] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1231.507410] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-6959d445-ad5e-4f5a-a06a-2b71a6327345 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance 'af8d2b01-b0a5-408b-ace3-dd085097b393' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1231.958637] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1231.958637] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1231.958637] env[68233]: _type = "HttpNfcLease" [ 1231.958637] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1231.966431] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783319, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.459971] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1232.459971] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1232.459971] env[68233]: _type = "HttpNfcLease" [ 1232.459971] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1232.462837] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1232.462837] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524c97a5-c371-96ff-e2b4-8ae2c5e26665" [ 1232.462837] env[68233]: _type = "HttpNfcLease" [ 1232.462837] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1232.463551] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf67caf-96be-49ef-b63b-a796d109685c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.470582] env[68233]: DEBUG oslo_vmware.api [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783319, 'name': PowerOnVM_Task, 'duration_secs': 0.570181} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.473515] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1232.473722] env[68233]: INFO nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Took 6.66 seconds to spawn the instance on the hypervisor. [ 1232.473909] env[68233]: DEBUG nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1232.474222] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk from lease info. 
{{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1232.474385] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating HTTP connection to write to file with size = 31668224 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1232.476275] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a442ce-fd72-47ca-8399-f7b480603f10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.539593] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.539842] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.540133] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.540346] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.540518] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.551680] env[68233]: INFO nova.compute.manager [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Terminating instance [ 1232.553361] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with 
opID=oslo.vmware-d629a7b2-ba61-4784-ad45-ffd96dcbee65 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.050722] env[68233]: INFO nova.compute.manager [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Took 12.92 seconds to build instance. [ 1233.065590] env[68233]: DEBUG nova.compute.manager [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1233.065799] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1233.068379] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b49529-be39-4a28-933e-225cc1c512c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.080322] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1233.080677] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98789259-2785-4e03-911c-993ab0df6056 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.090538] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1233.090538] env[68233]: value = "task-2783320" [ 1233.090538] env[68233]: _type = "Task" [ 1233.090538] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.099248] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783320, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.552358] env[68233]: DEBUG oslo_concurrency.lockutils [None req-98b416f3-89dc-4f14-84d7-ab35c6aaa54e tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.430s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.599908] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783320, 'name': PowerOffVM_Task, 'duration_secs': 0.275892} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.600239] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1233.600359] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1233.600686] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a533d735-1a30-4faa-959d-71735e2dbc13 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.678658] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1233.678922] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1233.679139] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleting the datastore file [datastore2] c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1233.680286] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fa63745-a870-456d-a78d-dde47197a7d3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.688061] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 
1233.688061] env[68233]: value = "task-2783322" [ 1233.688061] env[68233]: _type = "Task" [ 1233.688061] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.696547] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783322, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.730282] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Completed reading data from the image iterator. {{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1233.730614] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1233.731752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27b0fdb-0d4c-4329-a56c-34f89c8e0610 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.738943] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1233.739172] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1233.739404] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-fe327877-70e1-4580-a142-e3ce44703c56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.932186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.932186] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.932186] env[68233]: DEBUG nova.compute.manager [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Going to confirm migration 8 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1233.946183] env[68233]: DEBUG oslo_vmware.rw_handles [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e7e88f-025b-b7f5-fa34-05302312ecdd/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1233.946374] env[68233]: INFO nova.virt.vmwareapi.images [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Downloaded image file data 267e83e8-84b0-4301-9a90-8a1f48a5e360 [ 1233.947156] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf12704e-c9f0-4f2c-9f5e-0054f6edf968 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.963600] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06979e68-4bff-4870-bbfb-0b114a8dfb36 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.995696] env[68233]: DEBUG nova.compute.manager [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Received event network-changed-e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1233.995888] env[68233]: DEBUG nova.compute.manager [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Refreshing instance network info cache due to event network-changed-e8159e73-4c22-4b53-8100-11a2cbfb9853. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1233.996158] env[68233]: DEBUG oslo_concurrency.lockutils [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] Acquiring lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.996305] env[68233]: DEBUG oslo_concurrency.lockutils [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] Acquired lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.996959] env[68233]: DEBUG nova.network.neutron [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Refreshing network info cache for port e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1233.998957] env[68233]: INFO nova.virt.vmwareapi.images [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] The imported VM was unregistered [ 1234.001276] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1234.001460] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 
tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Creating directory with path [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1234.002043] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0ac486a-b1c5-494d-9a57-abf10ab7b2a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.038278] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Created directory with path [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1234.038618] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15/OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15.vmdk to [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk. {{(pid=68233) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1234.038861] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-242e603b-6954-4017-b426-4a9df51489a8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.045405] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1234.045405] env[68233]: value = "task-2783324" [ 1234.045405] env[68233]: _type = "Task" [ 1234.045405] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.054825] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.198752] env[68233]: DEBUG oslo_vmware.api [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783322, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197335} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.199043] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1234.199217] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1234.199389] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1234.199567] env[68233]: INFO nova.compute.manager [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1234.199814] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1234.200023] env[68233]: DEBUG nova.compute.manager [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1234.200142] env[68233]: DEBUG nova.network.neutron [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1234.522336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.522336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquired lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.522336] env[68233]: DEBUG nova.network.neutron [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1234.522336] env[68233]: DEBUG nova.objects.instance [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'info_cache' on Instance uuid af8d2b01-b0a5-408b-ace3-dd085097b393 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1234.556460] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.779820] env[68233]: DEBUG nova.network.neutron [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updated VIF entry in instance network info cache for port e8159e73-4c22-4b53-8100-11a2cbfb9853. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1234.780340] env[68233]: DEBUG nova.network.neutron [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updating instance_info_cache with network_info: [{"id": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "address": "fa:16:3e:4b:8d:00", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape8159e73-4c", "ovs_interfaceid": "e8159e73-4c22-4b53-8100-11a2cbfb9853", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.056758] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.134571] env[68233]: DEBUG nova.network.neutron [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.283017] env[68233]: DEBUG oslo_concurrency.lockutils [req-f34e1950-cc46-4ecc-b83a-0827308150d1 req-7cede695-1065-452f-bc7e-3db0068d47ab service nova] Releasing lock "refresh_cache-916774a9-bfd3-4931-bc3a-1d50471a1c40" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.557804] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.638192] env[68233]: INFO nova.compute.manager [-] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Took 1.44 seconds to deallocate network for instance. 
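The entries above repeatedly show the same pattern: the VMware driver issues a SOAP call that returns a Task or HttpNfcLease handle, then logs "Waiting for the task ... to complete" and "progress is N%" until the object reaches a terminal state ("completed successfully", or a lease moving from "initializing" to "ready" before the VMDK write handle is opened and later released). The following is a minimal illustrative sketch of that poll-until-done loop, not the oslo.vmware implementation; wait_until_done and fetch_state are hypothetical names, and the real code reads the vSphere TaskInfo / HttpNfcLease state over the SOAP API via wait_for_task / wait_for_lease_ready instead.

import time

def wait_until_done(fetch_state, poll_interval=0.5, timeout=300.0):
    """Poll a vSphere-style task or lease until it finishes or fails.

    fetch_state is a hypothetical callable returning (state, progress, error).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = fetch_state()
        # States mirror what the log shows: queued/running -> success|error
        # for tasks, initializing -> ready|error for HttpNfcLeases.
        if state in ('success', 'ready'):
            return progress
        if state == 'error':
            raise RuntimeError('remote operation failed: %s' % error)
        print('progress is %s%%' % progress)  # analogous to the DEBUG _poll_task entries
        time.sleep(poll_interval)
    raise TimeoutError('gave up waiting for the task/lease')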
[ 1235.853516] env[68233]: DEBUG nova.network.neutron [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [{"id": "343ba5d0-d87f-4796-a86c-5a2922804c78", "address": "fa:16:3e:fc:56:ed", "network": {"id": "78df386a-75c0-4527-8f92-5372bb6fd767", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-602796220-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "14d2a0ead80a4efba8420023c31f8f11", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27abaf31-0f39-428c-a8d3-cd7548de6818", "external-id": "nsx-vlan-transportzone-505", "segmentation_id": 505, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap343ba5d0-d8", "ovs_interfaceid": "343ba5d0-d87f-4796-a86c-5a2922804c78", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1236.021904] env[68233]: DEBUG nova.compute.manager [req-9f058253-dbed-4b5e-8f57-77acb2397bf3 req-01ee0cb9-f2fd-419b-b2a7-032c08096edd service nova] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Received event network-vif-deleted-16ec4545-d69d-43bf-a956-54414f895c1e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1236.057655] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.148249] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.148552] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.148780] env[68233]: DEBUG nova.objects.instance [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'resources' on Instance uuid c2d04b37-3eae-46cb-a227-b62d36c62a6a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.357505] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Releasing lock "refresh_cache-af8d2b01-b0a5-408b-ace3-dd085097b393" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.357773] env[68233]: DEBUG nova.objects.instance [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'migration_context' on Instance uuid af8d2b01-b0a5-408b-ace3-dd085097b393 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.560364] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.782080] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5eff68-493a-4c33-9f86-804f6fcb822b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.790060] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32682bf-135b-4f78-bb8f-e13faeb5cd1d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.820348] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b78ac9d-da44-442a-9853-8f8f03acee7d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.828178] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cb415a-3cab-4386-abee-0e97516da395 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.841834] env[68233]: DEBUG nova.compute.provider_tree [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1236.860340] env[68233]: DEBUG nova.objects.base [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1236.861203] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a055e1-f97e-4f5a-939f-753558db5bae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.879410] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf03e85a-dbb0-4b6c-acea-fdec7dc54d93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.885124] env[68233]: DEBUG oslo_vmware.api [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1236.885124] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5289d037-8d3a-51fe-da9a-7571f51f6aa9" [ 1236.885124] env[68233]: _type = "Task" [ 1236.885124] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.893326] env[68233]: DEBUG oslo_vmware.api [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5289d037-8d3a-51fe-da9a-7571f51f6aa9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.060393] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.362960] env[68233]: ERROR nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [req-286d5a8a-41ea-44e0-9c4f-dd438c24465c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-286d5a8a-41ea-44e0-9c4f-dd438c24465c"}]} [ 1237.380623] env[68233]: DEBUG nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1237.394327] env[68233]: DEBUG oslo_vmware.api [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5289d037-8d3a-51fe-da9a-7571f51f6aa9, 'name': SearchDatastore_Task, 'duration_secs': 0.027027} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.394596] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.395465] env[68233]: DEBUG nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1237.395668] env[68233]: DEBUG nova.compute.provider_tree [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1237.405864] env[68233]: DEBUG nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1237.422899] env[68233]: DEBUG nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1237.528630] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f1d578-1819-41a0-80f4-ad43c768b3d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.537233] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447a5867-2dd0-467e-8a08-a250809e3183 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.570046] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4705864a-da87-4138-8edc-e9e1459f2f86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.577128] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783324, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.148032} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.579181] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15/OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15.vmdk to [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk. [ 1237.579383] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Cleaning up location [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1237.579552] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_d49e2cec-b6e1-4b83-94d3-f8d06524ae15 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1237.579827] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c496e3e-87b7-41df-a668-161a80cb8327 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.582288] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004a8a12-7234-4f53-a6fa-8d6f9063d257 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.595476] env[68233]: DEBUG nova.compute.provider_tree [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1237.597806] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1237.597806] env[68233]: value = "task-2783325" [ 1237.597806] env[68233]: _type = "Task" 
[ 1237.597806] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.604934] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783325, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.110911] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783325, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33787} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.111192] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1238.111364] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Releasing lock "[datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1238.111601] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk to [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1238.111850] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2164a41-047b-4603-b250-9b0b4e96aef4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.118521] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1238.118521] env[68233]: value = "task-2783326" [ 1238.118521] env[68233]: _type = "Task" [ 1238.118521] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.128769] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.129800] env[68233]: DEBUG nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 173 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1238.130010] env[68233]: DEBUG nova.compute.provider_tree [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 173 to 174 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1238.130202] env[68233]: DEBUG nova.compute.provider_tree [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1238.628704] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.634661] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.486s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.637114] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.242s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.653151] env[68233]: INFO nova.scheduler.client.report [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleted allocations for instance c2d04b37-3eae-46cb-a227-b62d36c62a6a [ 1239.129520] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.162488] env[68233]: DEBUG oslo_concurrency.lockutils [None req-efda8ea9-cd68-4ba1-bb57-e7d75dabcfc5 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "c2d04b37-3eae-46cb-a227-b62d36c62a6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.622s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.258695] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbea5cd9-592e-4a59-873f-536a7219ffeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.266765] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7907e37d-0894-4d0a-898d-f78db2f7cb6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.298667] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61ee49c-abfb-4fca-bdfa-0203f8c1f273 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.306422] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd68a194-b686-4329-af62-4449b4867c7f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.320940] env[68233]: DEBUG nova.compute.provider_tree [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1239.633315] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.824342] env[68233]: DEBUG nova.scheduler.client.report [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1240.130481] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.631630] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783326, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.274932} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.631898] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/267e83e8-84b0-4301-9a90-8a1f48a5e360/267e83e8-84b0-4301-9a90-8a1f48a5e360.vmdk to [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1240.632752] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072dace9-4182-43a4-8475-495a9a92c515 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.655981] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1240.656872] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b35f6ec-99a0-4f0e-8935-b62b0a23178f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.677770] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1240.677770] env[68233]: value = "task-2783327" [ 1240.677770] env[68233]: _type = "Task" [ 1240.677770] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1240.687356] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783327, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.837379] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.200s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.188420] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783327, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.391048] env[68233]: INFO nova.scheduler.client.report [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocation for migration 95577185-409f-424f-929a-13e5d6c05d91 [ 1241.688820] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783327, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.734916] env[68233]: INFO nova.compute.manager [None req-eea781d2-8045-43cb-8e4a-4b3f571cb3cb tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Get console output [ 1241.735222] env[68233]: WARNING nova.virt.vmwareapi.driver [None req-eea781d2-8045-43cb-8e4a-4b3f571cb3cb tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] The console log is missing. Check your VSPC configuration [ 1241.791451] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.791710] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.896728] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c81715a2-6134-4f0d-9293-314106192c5e tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.965s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1242.189076] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783327, 'name': ReconfigVM_Task, 'duration_secs': 1.080506} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.189354] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Reconfigured VM instance instance-00000074 to attach disk [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687/863e15c6-caa4-47aa-902a-7be2c9538687.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1242.189979] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f3aa7ca-9410-4d1a-99ef-8a96a78ae9a4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.195928] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1242.195928] env[68233]: value = "task-2783328" [ 1242.195928] env[68233]: _type = "Task" [ 1242.195928] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.203048] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783328, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.294873] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1242.705576] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783328, 'name': Rename_Task, 'duration_secs': 0.145266} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.705875] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1242.706109] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08d321ac-847c-4c54-9185-f131ce3c282a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.711433] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1242.711433] env[68233]: value = "task-2783329" [ 1242.711433] env[68233]: _type = "Task" [ 1242.711433] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.718531] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783329, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.816133] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1242.816424] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1242.817978] env[68233]: INFO nova.compute.claims [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1243.221775] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783329, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.722575] env[68233]: DEBUG oslo_vmware.api [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783329, 'name': PowerOnVM_Task, 'duration_secs': 0.513512} completed successfully. 
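Editor's note: the repeated "Task: {...} progress is N%" / "completed successfully" records above (logged from `_poll_task` at oslo_vmware/api.py:434 and :444) come from a poll-until-complete loop around each vCenter task. A minimal, self-contained sketch of that pattern follows; the `poll` callback, its return shape, and the interval are invented for illustration and this is not the oslo.vmware implementation.

```python
import time

def wait_for_task(poll, interval=0.5):
    """Poll a task until it finishes; `poll` is a hypothetical callable
    returning (state, progress, result), mirroring the progress lines above."""
    while True:
        state, progress, result = poll()
        if state == "success":
            return result                          # task finished; hand back its result
        if state == "error":
            raise RuntimeError(f"task failed: {result}")
        print(f"progress is {progress}%")          # still running; report and retry
        time.sleep(interval)                       # back off before the next poll

# Example: a task that is already done returns its result immediately.
# wait_for_task(lambda: ("success", 100, "done"))
```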
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.722891] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1243.819910] env[68233]: DEBUG nova.compute.manager [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.820862] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60151777-8132-4efe-ab8c-087b45540cee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.918407] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20cadad2-b40a-488b-b786-1b4cf84eedb3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.925954] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aef656c-4bc3-45d0-9ac4-a2d8ea393544 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.957078] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741ad363-39ea-42a1-a768-b674fec0d95c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.964041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b2f82f-15d0-43a6-97d0-d518a8b5e1f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.976893] env[68233]: DEBUG nova.compute.provider_tree [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1244.341859] env[68233]: DEBUG oslo_concurrency.lockutils [None req-977754e5-3c63-4a0f-b806-2b95f1b423c5 tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.226s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.480051] env[68233]: DEBUG nova.scheduler.client.report [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1244.984820] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.985332] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1245.490554] env[68233]: DEBUG nova.compute.utils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1245.491992] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1245.492188] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1245.537299] env[68233]: DEBUG nova.policy [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3be589685b874d76b3753c06d4fc0877', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a664e5702b9d44908d10f7e0f75ffce6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1245.787808] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Successfully created port: fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1245.995389] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Start building block device mappings for instance. 
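Editor's note: the 'Acquiring lock "compute_resources"', '... acquired ... waited 0.000s', and '... "released" ... held 2.168s' records above are emitted by oslo.concurrency's named-lock wrapper. A minimal usage sketch follows; only the lock name is taken from the log, and the decorated function is invented for illustration.

```python
from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def claim_resources():
    # Runs with the in-process "compute_resources" lock held; oslo.concurrency
    # logs the wait time on acquire and the hold time on release, as seen above.
    pass
```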
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1247.005024] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1247.030039] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1247.030200] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1247.030352] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1247.030526] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1247.030797] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1247.030882] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1247.031025] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1247.031191] env[68233]: DEBUG 
nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1247.031364] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1247.031526] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1247.031699] env[68233]: DEBUG nova.virt.hardware [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1247.032568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa43c1bf-018f-4a0f-8475-da9cfd7e787a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.040600] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ed38c7-44e5-4fce-94c7-823d8f16b839 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.129316] env[68233]: DEBUG nova.compute.manager [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Received event network-vif-plugged-fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1247.129453] env[68233]: DEBUG oslo_concurrency.lockutils [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 service nova] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.129634] env[68233]: DEBUG oslo_concurrency.lockutils [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 service nova] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.129806] env[68233]: DEBUG oslo_concurrency.lockutils [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 service nova] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.129973] env[68233]: DEBUG nova.compute.manager [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 
service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] No waiting events found dispatching network-vif-plugged-fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1247.130156] env[68233]: WARNING nova.compute.manager [req-93c3b758-a369-4dd4-b080-b4670810c192 req-d2f1dee9-8e2b-4d8e-bc67-5d26dffbb383 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Received unexpected event network-vif-plugged-fb60507e-d1d4-46ce-8f26-4219cd56b0ad for instance with vm_state building and task_state spawning. [ 1247.210899] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Successfully updated port: fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1247.713502] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.713682] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.713801] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1248.245544] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1248.577100] env[68233]: DEBUG nova.network.neutron [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating instance_info_cache with network_info: [{"id": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "address": "fa:16:3e:27:3c:a0", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb60507e-d1", "ovs_interfaceid": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.079834] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1249.080220] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Instance network_info: |[{"id": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "address": "fa:16:3e:27:3c:a0", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb60507e-d1", "ovs_interfaceid": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1249.080668] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:3c:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4712af2-45ef-4652-8d2c-482ec70056d0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb60507e-d1d4-46ce-8f26-4219cd56b0ad', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1249.088241] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1249.088439] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1249.088688] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53dd8dc5-d636-4031-a674-4abd806acfd1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.109786] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1249.109786] env[68233]: value = "task-2783330" [ 1249.109786] env[68233]: _type = "Task" [ 1249.109786] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.117335] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783330, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.154166] env[68233]: DEBUG nova.compute.manager [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Received event network-changed-fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1249.154411] env[68233]: DEBUG nova.compute.manager [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Refreshing instance network info cache due to event network-changed-fb60507e-d1d4-46ce-8f26-4219cd56b0ad. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1249.154542] env[68233]: DEBUG oslo_concurrency.lockutils [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] Acquiring lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.154689] env[68233]: DEBUG oslo_concurrency.lockutils [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] Acquired lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.154851] env[68233]: DEBUG nova.network.neutron [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Refreshing network info cache for port fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1249.619673] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783330, 'name': CreateVM_Task, 'duration_secs': 0.319747} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.620158] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1249.620535] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.620719] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1249.621035] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1249.621283] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4c4a35e-74be-4be8-ac51-7f710720aa41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.625293] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1249.625293] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528ac18a-c2b0-8e14-4b3f-33a5bdb03d8b" [ 1249.625293] env[68233]: _type = "Task" [ 1249.625293] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.632110] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528ac18a-c2b0-8e14-4b3f-33a5bdb03d8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.862858] env[68233]: DEBUG nova.network.neutron [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updated VIF entry in instance network info cache for port fb60507e-d1d4-46ce-8f26-4219cd56b0ad. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1249.863229] env[68233]: DEBUG nova.network.neutron [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating instance_info_cache with network_info: [{"id": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "address": "fa:16:3e:27:3c:a0", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb60507e-d1", "ovs_interfaceid": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.136893] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]528ac18a-c2b0-8e14-4b3f-33a5bdb03d8b, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.137189] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.137416] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1250.137646] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.137793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.137969] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1250.138228] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8611e193-cb58-464b-82ed-7e698242ebb6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.147152] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1250.147327] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1250.147993] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9205f60-3cc3-4ae8-bc68-28bbcc8c9048 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.152476] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1250.152476] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a8ec6-1af0-db1b-b145-3c5ffae73e12" [ 1250.152476] env[68233]: _type = "Task" [ 1250.152476] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.159641] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a8ec6-1af0-db1b-b145-3c5ffae73e12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.366455] env[68233]: DEBUG oslo_concurrency.lockutils [req-e75cb4f9-d117-4b70-b0ad-ffc8afb00b94 req-c517eb5c-6aef-475d-8d52-9b3a720733fa service nova] Releasing lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1250.663259] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520a8ec6-1af0-db1b-b145-3c5ffae73e12, 'name': SearchDatastore_Task, 'duration_secs': 0.008352} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.663956] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a990195f-cb79-4bd5-8a5b-d9f32fae9ead {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.668630] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1250.668630] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5247b88c-8e6e-a4fa-28c7-e17c72a6bb2b" [ 1250.668630] env[68233]: _type = "Task" [ 1250.668630] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1250.675742] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5247b88c-8e6e-a4fa-28c7-e17c72a6bb2b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.179890] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5247b88c-8e6e-a4fa-28c7-e17c72a6bb2b, 'name': SearchDatastore_Task, 'duration_secs': 0.010018} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.180187] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.180452] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 280bc403-3d10-4a29-9507-c548d9cf1d1a/280bc403-3d10-4a29-9507-c548d9cf1d1a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1251.180740] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dee2da2e-e7b9-4bc6-a63b-0d7ed0b560d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.187853] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1251.187853] env[68233]: value = "task-2783331" [ 1251.187853] env[68233]: _type = "Task" [ 1251.187853] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.195622] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.700063] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.448272} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1251.700432] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 280bc403-3d10-4a29-9507-c548d9cf1d1a/280bc403-3d10-4a29-9507-c548d9cf1d1a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1251.700665] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1251.700984] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-829b454c-8be4-4a61-8ecf-4e03f47c2aa0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.708279] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1251.708279] env[68233]: value = "task-2783332" [ 1251.708279] env[68233]: _type = "Task" [ 1251.708279] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.718078] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783332, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.218723] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057629} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.218984] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1252.219758] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d906c7c3-6bd3-4f71-aa35-2725239b5f56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.241020] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] 280bc403-3d10-4a29-9507-c548d9cf1d1a/280bc403-3d10-4a29-9507-c548d9cf1d1a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1252.241252] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de3c1c56-d507-4d56-9773-73fd282b8413 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.260045] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1252.260045] env[68233]: value = "task-2783333" [ 1252.260045] env[68233]: _type = "Task" [ 1252.260045] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.267331] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783333, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.769465] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783333, 'name': ReconfigVM_Task, 'duration_secs': 0.287652} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.769867] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfigured VM instance instance-0000007b to attach disk [datastore2] 280bc403-3d10-4a29-9507-c548d9cf1d1a/280bc403-3d10-4a29-9507-c548d9cf1d1a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1252.770337] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0d4b7e24-6fb9-4cb2-bfe0-83e41815418e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.775782] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1252.775782] env[68233]: value = "task-2783334" [ 1252.775782] env[68233]: _type = "Task" [ 1252.775782] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.782799] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783334, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.284852] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783334, 'name': Rename_Task, 'duration_secs': 0.134549} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.285123] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1253.285393] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1736afb5-653c-4118-b674-fc1857cc7dad {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.292512] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1253.292512] env[68233]: value = "task-2783335" [ 1253.292512] env[68233]: _type = "Task" [ 1253.292512] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.302498] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783335, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.801539] env[68233]: DEBUG oslo_vmware.api [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783335, 'name': PowerOnVM_Task, 'duration_secs': 0.432245} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.801929] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1253.802052] env[68233]: INFO nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Took 6.80 seconds to spawn the instance on the hypervisor. [ 1253.802209] env[68233]: DEBUG nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1253.802938] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016fa08f-b1bb-407f-9e5a-49618d8b25c2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.321558] env[68233]: INFO nova.compute.manager [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Took 11.52 seconds to build instance. [ 1254.708166] env[68233]: DEBUG nova.compute.manager [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Received event network-changed-fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1254.708386] env[68233]: DEBUG nova.compute.manager [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Refreshing instance network info cache due to event network-changed-fb60507e-d1d4-46ce-8f26-4219cd56b0ad. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1254.708603] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] Acquiring lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.708757] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] Acquired lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.708922] env[68233]: DEBUG nova.network.neutron [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Refreshing network info cache for port fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1254.823149] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f9bc6915-8b79-4a26-b756-8b0799d4252f tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.031s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.415115] env[68233]: DEBUG nova.network.neutron [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updated VIF entry in instance network info cache for port fb60507e-d1d4-46ce-8f26-4219cd56b0ad. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1255.415524] env[68233]: DEBUG nova.network.neutron [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating instance_info_cache with network_info: [{"id": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "address": "fa:16:3e:27:3c:a0", "network": {"id": "3322f7d2-a422-422f-8159-f39bf6856c83", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-797741408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a664e5702b9d44908d10f7e0f75ffce6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4712af2-45ef-4652-8d2c-482ec70056d0", "external-id": "nsx-vlan-transportzone-826", "segmentation_id": 826, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb60507e-d1", "ovs_interfaceid": "fb60507e-d1d4-46ce-8f26-4219cd56b0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1255.918506] env[68233]: DEBUG oslo_concurrency.lockutils [req-7ccafa79-67bf-440f-8c22-d5b4acf98875 req-b961b143-cc44-4f17-ad66-18b8c8fa02d6 service nova] Releasing lock "refresh_cache-280bc403-3d10-4a29-9507-c548d9cf1d1a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.834358] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1256.834554] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.340496] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.340810] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.340862] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.340974] env[68233]: DEBUG 
oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.341140] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.341288] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.341530] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1257.341620] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1257.843963] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.844230] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.844340] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.844492] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1257.845450] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232fcb7b-b56b-4962-8bbc-ee42825f818a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.853657] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293eb16f-5b0a-430c-911c-3c8efb9126b8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.867283] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e98e99-39a1-4c90-91bc-6fc2048d4b3c {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.873188] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd960040-5aac-44e1-a7c2-63684746bf66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.901430] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179345MB free_disk=175GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1257.901574] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.901779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.936344] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.936698] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance aadc7dbe-456c-4bf3-b26d-bac672459fb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.936698] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 81e0800d-7731-433c-9238-b4aa07a4ddda actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.936819] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance f24af50e-90cd-4398-84d1-a1e1849d01d6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.936859] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance af8d2b01-b0a5-408b-ace3-dd085097b393 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.936957] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 916774a9-bfd3-4931-bc3a-1d50471a1c40 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.937088] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 863e15c6-caa4-47aa-902a-7be2c9538687 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.937217] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 280bc403-3d10-4a29-9507-c548d9cf1d1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1258.937749] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1258.937749] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1259.041519] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d4feb2-7700-4480-939d-c4734677f2be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.051418] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4657c2-ff78-472c-ab70-738e015606ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.083380] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e393814-b3c0-4223-a668-a0324d377997 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.091015] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e21a72d-365d-4428-a07f-dc972ce0bfc3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.105046] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1259.534740] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 
tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.535023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.535231] env[68233]: DEBUG nova.compute.manager [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1259.536171] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95eb498e-34f8-4d33-aab9-8539a2d0d8c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.542699] env[68233]: DEBUG nova.compute.manager [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1259.543243] env[68233]: DEBUG nova.objects.instance [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'flavor' on Instance uuid f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.607693] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1260.111996] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1260.112387] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.210s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.550849] env[68233]: DEBUG nova.virt.vmwareapi.vm_util 
[None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.551205] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef4cca8d-5eff-44f4-aa75-352e09351152 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.559113] env[68233]: DEBUG oslo_vmware.api [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1260.559113] env[68233]: value = "task-2783336" [ 1260.559113] env[68233]: _type = "Task" [ 1260.559113] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.566974] env[68233]: DEBUG oslo_vmware.api [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.068812] env[68233]: DEBUG oslo_vmware.api [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783336, 'name': PowerOffVM_Task, 'duration_secs': 0.249706} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.069079] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.069290] env[68233]: DEBUG nova.compute.manager [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1261.070056] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee9a059-999c-4cda-8939-74235927752f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.581129] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b159891-216d-4c82-9b90-b4bac4150e29 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.893918] env[68233]: DEBUG nova.objects.instance [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'flavor' on Instance uuid f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.399151] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1262.399336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1262.399514] env[68233]: DEBUG nova.network.neutron [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1262.399690] env[68233]: DEBUG nova.objects.instance [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'info_cache' on Instance uuid f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.903212] env[68233]: DEBUG nova.objects.base [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1263.600105] env[68233]: DEBUG nova.network.neutron [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.102718] env[68233]: DEBUG 
oslo_concurrency.lockutils [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.108737] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1265.109142] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-499d4763-cb64-4bb6-b03d-cd3635243673 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.117183] env[68233]: DEBUG oslo_vmware.api [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1265.117183] env[68233]: value = "task-2783337" [ 1265.117183] env[68233]: _type = "Task" [ 1265.117183] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1265.124960] env[68233]: DEBUG oslo_vmware.api [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1265.626794] env[68233]: DEBUG oslo_vmware.api [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783337, 'name': PowerOnVM_Task, 'duration_secs': 0.37361} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1265.627022] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1265.627237] env[68233]: DEBUG nova.compute.manager [None req-8e48183e-dbea-47a6-b743-81fc383681f6 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1265.628041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c878f525-2197-4358-b18f-e7aceb94e886 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.623133] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159106c5-131a-4811-93c9-0e32a1da8856 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.630337] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Suspending the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1266.630592] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d3b6bcd1-6c5f-4393-baec-13fa9a5ace4c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.636819] env[68233]: DEBUG oslo_vmware.api [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1266.636819] env[68233]: value = "task-2783338" [ 1266.636819] env[68233]: _type = "Task" [ 1266.636819] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1266.645995] env[68233]: DEBUG oslo_vmware.api [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783338, 'name': SuspendVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.147597] env[68233]: DEBUG oslo_vmware.api [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783338, 'name': SuspendVM_Task} progress is 62%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1267.646789] env[68233]: DEBUG oslo_vmware.api [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783338, 'name': SuspendVM_Task, 'duration_secs': 0.990429} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1267.648081] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Suspended the VM {{(pid=68233) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1267.648081] env[68233]: DEBUG nova.compute.manager [None req-d6d860d8-6c15-4140-846c-27776a93ccdc tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1267.648493] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e206e2a-6d79-4bda-a65a-45e4b976eee2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.969443] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.969667] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.969886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1267.970092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1267.970344] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.972392] env[68233]: INFO nova.compute.manager [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Terminating instance [ 1268.475936] env[68233]: DEBUG nova.compute.manager [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1268.476204] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1268.476503] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aedd74f7-1fb3-4086-99b6-e197d7a9c4fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.483487] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1268.483487] env[68233]: value = "task-2783339" [ 1268.483487] env[68233]: _type = "Task" [ 1268.483487] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1268.491598] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783339, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1268.994193] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783339, 'name': PowerOffVM_Task, 'duration_secs': 0.202558} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1268.994486] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1268.994687] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1268.994881] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559534', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'name': 'volume-078fe37a-d525-4476-a117-3378bb38267b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'af8d2b01-b0a5-408b-ace3-dd085097b393', 'attached_at': '2025-03-06T04:00:24.000000', 'detached_at': '', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'serial': '078fe37a-d525-4476-a117-3378bb38267b'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1268.995717] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c57959-a01e-497a-aa82-958c4e411dc7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1268.998297] env[68233]: INFO nova.compute.manager [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Resuming [ 1268.998840] env[68233]: DEBUG nova.objects.instance [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'flavor' on Instance uuid f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1269.017056] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee80763-26e5-4bd4-ab61-241233242752 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.025118] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34764e87-286d-454c-9293-ab2c442d27b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.043294] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2c5b76-5d5a-4c91-9b44-5cf0e5c0264f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.057773] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] The volume has not been displaced from its original location: [datastore2] volume-078fe37a-d525-4476-a117-3378bb38267b/volume-078fe37a-d525-4476-a117-3378bb38267b.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1269.062957] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1269.063512] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bf5edc7-3de3-465a-a1c6-3941a80fab67 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.082464] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1269.082464] env[68233]: value = "task-2783340" [ 1269.082464] env[68233]: _type = "Task" [ 1269.082464] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.089985] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783340, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1269.591541] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783340, 'name': ReconfigVM_Task, 'duration_secs': 0.147171} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1269.591831] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1269.596451] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffb65822-47f3-4661-91bb-8e720cf0f1bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.612355] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1269.612355] env[68233]: value = "task-2783341" [ 1269.612355] env[68233]: _type = "Task" [ 1269.612355] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.620434] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783341, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.122079] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783341, 'name': ReconfigVM_Task, 'duration_secs': 0.135618} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.122079] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559534', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'name': 'volume-078fe37a-d525-4476-a117-3378bb38267b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'af8d2b01-b0a5-408b-ace3-dd085097b393', 'attached_at': '2025-03-06T04:00:24.000000', 'detached_at': '', 'volume_id': '078fe37a-d525-4476-a117-3378bb38267b', 'serial': '078fe37a-d525-4476-a117-3378bb38267b'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1270.122515] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1270.123020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff55420-1e0f-487a-bd3a-de4fcca53f5e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.129134] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1270.129350] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d14da773-4c77-4cc1-8dda-08d93a01be2e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.195097] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1270.195321] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1270.195502] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 
tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] af8d2b01-b0a5-408b-ace3-dd085097b393 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1270.195775] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a593421-bb97-4980-96cb-4a5311f75365 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1270.202970] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1270.202970] env[68233]: value = "task-2783343" [ 1270.202970] env[68233]: _type = "Task" [ 1270.202970] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1270.210249] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783343, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.509077] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.509278] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquired lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.509481] env[68233]: DEBUG nova.network.neutron [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1270.714027] env[68233]: DEBUG oslo_vmware.api [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783343, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.087962} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1270.714027] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1270.714027] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1270.714027] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1270.714027] env[68233]: INFO nova.compute.manager [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1270.714027] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1270.714365] env[68233]: DEBUG nova.compute.manager [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1270.714365] env[68233]: DEBUG nova.network.neutron [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1271.193848] env[68233]: DEBUG nova.compute.manager [req-4de50663-4dc0-4864-a11f-b2bbad92414d req-7d6d659e-c05c-41ae-acf9-c77e0733257b service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Received event network-vif-deleted-343ba5d0-d87f-4796-a86c-5a2922804c78 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1271.194247] env[68233]: INFO nova.compute.manager [req-4de50663-4dc0-4864-a11f-b2bbad92414d req-7d6d659e-c05c-41ae-acf9-c77e0733257b service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Neutron deleted interface 343ba5d0-d87f-4796-a86c-5a2922804c78; detaching it from the instance and deleting it from the info cache [ 1271.194247] env[68233]: DEBUG nova.network.neutron [req-4de50663-4dc0-4864-a11f-b2bbad92414d req-7d6d659e-c05c-41ae-acf9-c77e0733257b service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.219841] env[68233]: DEBUG nova.network.neutron [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [{"id": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "address": "fa:16:3e:3e:ff:b1", "network": {"id": "58a0d901-0b93-41be-a895-5e5029a0db10", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-861234459-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dbc7604c87d6485097fe5658d68217b9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape303f634-41", "ovs_interfaceid": "e303f634-41bf-4e57-9c9a-6555e22b32bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.674929] env[68233]: DEBUG nova.network.neutron [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.696269] env[68233]: DEBUG oslo_vmware.service [-] 
Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-57a0707c-0629-4f03-a9bb-120d520bbc87 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.706210] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecac3ae-568a-48a9-9445-b1328cb61640 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.722387] env[68233]: DEBUG oslo_concurrency.lockutils [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Releasing lock "refresh_cache-f24af50e-90cd-4398-84d1-a1e1849d01d6" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.723197] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24dfc50-6dd7-4973-b33e-d58cb36cb0bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.734827] env[68233]: DEBUG nova.compute.manager [req-4de50663-4dc0-4864-a11f-b2bbad92414d req-7d6d659e-c05c-41ae-acf9-c77e0733257b service nova] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Detach interface failed, port_id=343ba5d0-d87f-4796-a86c-5a2922804c78, reason: Instance af8d2b01-b0a5-408b-ace3-dd085097b393 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1271.737099] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Resuming the VM {{(pid=68233) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1271.737328] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91ce8be0-f06c-46f1-88a2-47708b9edffd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.743534] env[68233]: DEBUG oslo_vmware.api [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1271.743534] env[68233]: value = "task-2783344" [ 1271.743534] env[68233]: _type = "Task" [ 1271.743534] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.750806] env[68233]: DEBUG oslo_vmware.api [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783344, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.178119] env[68233]: INFO nova.compute.manager [-] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Took 1.46 seconds to deallocate network for instance. [ 1272.254025] env[68233]: DEBUG oslo_vmware.api [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783344, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.726031] env[68233]: INFO nova.compute.manager [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Took 0.55 seconds to detach 1 volumes for instance. [ 1272.728907] env[68233]: DEBUG nova.compute.manager [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Deleting volume: 078fe37a-d525-4476-a117-3378bb38267b {{(pid=68233) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1272.754330] env[68233]: DEBUG oslo_vmware.api [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783344, 'name': PowerOnVM_Task, 'duration_secs': 0.512869} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.755193] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Resumed the VM {{(pid=68233) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1272.755193] env[68233]: DEBUG nova.compute.manager [None req-dab9b880-8f0e-4f1e-b259-516afcd2c598 tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1272.756140] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3947d961-3002-4932-bc50-0955c72dc9d2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.270649] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.271119] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.271805] env[68233]: DEBUG nova.objects.instance [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'resources' on Instance uuid af8d2b01-b0a5-408b-ace3-dd085097b393 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1273.798119] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] 
Acquiring lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.798380] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.798585] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "f24af50e-90cd-4398-84d1-a1e1849d01d6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.798768] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.798932] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.801325] env[68233]: INFO nova.compute.manager [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Terminating instance [ 1273.871427] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e7d8f2-68f4-4db7-a7c3-9fd58daf6957 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.879019] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0140bd07-adaf-478b-9e04-280c1a100a0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.908412] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927341d0-3097-4f6c-aea6-7dc6519f5615 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.915731] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38a73da-1394-4700-a539-bb1c09bf02ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.929796] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.929958] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.930180] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1273.930369] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1273.930535] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.932186] env[68233]: DEBUG nova.compute.provider_tree [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.933572] env[68233]: INFO nova.compute.manager [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Terminating instance [ 1274.305208] env[68233]: DEBUG nova.compute.manager [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1274.305634] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1274.306392] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206adf0a-cb48-441c-ae4b-daccfdf28685 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.313895] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.314127] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6aaaef51-3fd9-4c88-888e-3d34ce9e5dbb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.319921] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1274.319921] env[68233]: value = "task-2783346" [ 1274.319921] env[68233]: _type = "Task" [ 1274.319921] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.327377] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783346, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.436621] env[68233]: DEBUG nova.scheduler.client.report [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1274.440068] env[68233]: DEBUG nova.compute.manager [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1274.440274] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1274.441300] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38702226-1ce6-4ac9-a83a-5479b64676ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.448680] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.448934] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dfa6dc9-b3aa-4981-bb6f-9e6ca4213b3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.457116] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1274.457116] env[68233]: value = "task-2783347" [ 1274.457116] env[68233]: _type = "Task" [ 1274.457116] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.467131] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.829535] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783346, 'name': PowerOffVM_Task, 'duration_secs': 0.188006} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.829814] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.829954] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1274.830203] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-702ce67b-2b83-40fd-afd0-8d108a865e81 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.897529] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1274.897807] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1274.898017] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleting the datastore file [datastore2] f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1274.898298] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e0f5220-9dfc-474f-8718-240df6abd353 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.904651] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for the task: (returnval){ [ 1274.904651] env[68233]: value = "task-2783349" [ 1274.904651] env[68233]: _type = "Task" [ 1274.904651] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.912068] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783349, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.941972] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.962240] env[68233]: INFO nova.scheduler.client.report [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocations for instance af8d2b01-b0a5-408b-ace3-dd085097b393 [ 1274.968946] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783347, 'name': PowerOffVM_Task, 'duration_secs': 0.200925} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.969382] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.969591] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1274.969843] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5db75212-bbe0-4d50-ade0-90ea37c90096 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.045239] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1275.045507] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1275.045669] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleting the datastore file [datastore2] 916774a9-bfd3-4931-bc3a-1d50471a1c40 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1275.045927] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81ef35c1-0633-4887-91d1-e1fc1d8ccee6 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.052414] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1275.052414] env[68233]: value = "task-2783351" [ 1275.052414] env[68233]: _type = "Task" [ 1275.052414] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.059891] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.414726] env[68233]: DEBUG oslo_vmware.api [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Task: {'id': task-2783349, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152651} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.415082] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.415137] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1275.415328] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1275.415520] env[68233]: INFO nova.compute.manager [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1275.415780] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1275.415972] env[68233]: DEBUG nova.compute.manager [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1275.416079] env[68233]: DEBUG nova.network.neutron [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1275.472103] env[68233]: DEBUG oslo_concurrency.lockutils [None req-0f9ae613-5812-4a3b-bb1a-a0f188e7eb14 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "af8d2b01-b0a5-408b-ace3-dd085097b393" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.502s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.562467] env[68233]: DEBUG oslo_vmware.api [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138685} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.562718] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.562904] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1275.563098] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1275.563275] env[68233]: INFO nova.compute.manager [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1275.563520] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1275.563727] env[68233]: DEBUG nova.compute.manager [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1275.563819] env[68233]: DEBUG nova.network.neutron [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1275.770140] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "81e0800d-7731-433c-9238-b4aa07a4ddda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.770425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.770634] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.770817] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.770985] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.773503] env[68233]: INFO nova.compute.manager [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Terminating instance [ 1276.028872] env[68233]: DEBUG nova.compute.manager [req-63338545-716d-404a-aefc-342f08077c52 req-6000d226-c73f-4f4e-9b39-6e5af351ff23 service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Received event network-vif-deleted-e8159e73-4c22-4b53-8100-11a2cbfb9853 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1276.028872] env[68233]: INFO 
nova.compute.manager [req-63338545-716d-404a-aefc-342f08077c52 req-6000d226-c73f-4f4e-9b39-6e5af351ff23 service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Neutron deleted interface e8159e73-4c22-4b53-8100-11a2cbfb9853; detaching it from the instance and deleting it from the info cache [ 1276.028872] env[68233]: DEBUG nova.network.neutron [req-63338545-716d-404a-aefc-342f08077c52 req-6000d226-c73f-4f4e-9b39-6e5af351ff23 service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.098886] env[68233]: DEBUG nova.compute.manager [req-67943de8-c57d-4b80-bad7-b16e945e7b0f req-12df809f-21e4-4ac0-b603-1bb1c0aca048 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Received event network-vif-deleted-e303f634-41bf-4e57-9c9a-6555e22b32bc {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1276.099114] env[68233]: INFO nova.compute.manager [req-67943de8-c57d-4b80-bad7-b16e945e7b0f req-12df809f-21e4-4ac0-b603-1bb1c0aca048 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Neutron deleted interface e303f634-41bf-4e57-9c9a-6555e22b32bc; detaching it from the instance and deleting it from the info cache [ 1276.099297] env[68233]: DEBUG nova.network.neutron [req-67943de8-c57d-4b80-bad7-b16e945e7b0f req-12df809f-21e4-4ac0-b603-1bb1c0aca048 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.278130] env[68233]: DEBUG nova.compute.manager [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1276.278130] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1276.278890] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020e8f34-e0b4-4d29-882c-cdff1a712e60 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.287607] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1276.287863] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d8d09bd1-1f26-472b-834b-46fcf133102c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.293905] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1276.293905] env[68233]: value = "task-2783352" [ 1276.293905] env[68233]: _type = "Task" [ 1276.293905] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.301314] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783352, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.510577] env[68233]: DEBUG nova.network.neutron [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.531143] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-09d9c6a4-35a9-493e-8d93-dcc4d20f0834 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.540696] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b458caa-8e63-4087-82f5-54dee466e094 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.569563] env[68233]: DEBUG nova.compute.manager [req-63338545-716d-404a-aefc-342f08077c52 req-6000d226-c73f-4f4e-9b39-6e5af351ff23 service nova] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Detach interface failed, port_id=e8159e73-4c22-4b53-8100-11a2cbfb9853, reason: Instance 916774a9-bfd3-4931-bc3a-1d50471a1c40 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1276.582023] env[68233]: DEBUG nova.network.neutron [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.604030] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-635750c6-21a2-4279-838c-82eec1fe0fc1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.613140] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847b2997-b25f-4c4b-ada9-711f52160d20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.641462] env[68233]: DEBUG nova.compute.manager [req-67943de8-c57d-4b80-bad7-b16e945e7b0f req-12df809f-21e4-4ac0-b603-1bb1c0aca048 service nova] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Detach interface failed, port_id=e303f634-41bf-4e57-9c9a-6555e22b32bc, reason: Instance f24af50e-90cd-4398-84d1-a1e1849d01d6 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1276.804197] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783352, 'name': PowerOffVM_Task, 'duration_secs': 0.191692} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.805083] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1276.805083] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1276.805083] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28f38c32-b5f3-4646-9d6c-3a6d93aeebc0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.878365] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1276.878638] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1276.878826] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore2] 81e0800d-7731-433c-9238-b4aa07a4ddda {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1276.879101] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bdc27dd-7171-463e-895d-336168efa6aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.885373] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1276.885373] env[68233]: value = "task-2783354" [ 1276.885373] env[68233]: _type = "Task" [ 1276.885373] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.892886] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783354, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.013861] env[68233]: INFO nova.compute.manager [-] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Took 1.45 seconds to deallocate network for instance. [ 1277.083752] env[68233]: INFO nova.compute.manager [-] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Took 1.67 seconds to deallocate network for instance. [ 1277.395900] env[68233]: DEBUG oslo_vmware.api [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783354, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124375} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.396145] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1277.396338] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1277.396535] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1277.396753] env[68233]: INFO nova.compute.manager [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Took 1.12 seconds to destroy the instance on the hypervisor. 
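The "Acquiring lock ... by ..." / "Lock ... acquired ... waited Ns" / "Lock ... released ... held Ns" DEBUG records surrounding this point (for example the "compute_resources" and "refresh_cache-<uuid>" locks) are emitted by oslo.concurrency's lockutils helpers, which Nova wraps around sections such as the resource tracker's update_usage. A minimal illustrative sketch of how such records get produced, assuming only the public lockutils decorator and context manager; the function names and bodies below are placeholders, not Nova's actual implementation:

from oslo_concurrency import lockutils

# Decorator form: serializes callers on the named semaphore and logs the
# "Acquiring lock ... by ..." / "acquired ... :: waited" / "released ... :: held"
# DEBUG lines seen in this log (from lockutils.py "inner").
@lockutils.synchronized('compute_resources')
def update_usage(instance_uuid):
    # placeholder body -- Nova's ResourceTracker does its accounting here
    print('updating usage for %s' % instance_uuid)

# Context-manager form: used for ad-hoc critical sections such as the
# "refresh_cache-<uuid>" locks taken while rebuilding the network info cache.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild the instance_info_cache under the lock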
[ 1277.396983] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1277.397190] env[68233]: DEBUG nova.compute.manager [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1277.397289] env[68233]: DEBUG nova.network.neutron [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1277.521065] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.521441] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.521715] env[68233]: DEBUG nova.objects.instance [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'resources' on Instance uuid 916774a9-bfd3-4931-bc3a-1d50471a1c40 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1277.590350] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1278.117483] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6da7f02-e535-4e1a-a62a-bcfba0451acb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.125537] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af68f57f-6026-4d32-9f36-a9a28b06294a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.129590] env[68233]: DEBUG nova.compute.manager [req-dc7879a9-085a-4ac9-82d2-8d3d63a26dec req-013876dd-c18c-4c4d-a6aa-79fe06ec5056 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Received event network-vif-deleted-f5288ad5-2e52-4994-bfa9-ff2e77f3fe10 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1278.129776] env[68233]: INFO nova.compute.manager [req-dc7879a9-085a-4ac9-82d2-8d3d63a26dec 
req-013876dd-c18c-4c4d-a6aa-79fe06ec5056 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Neutron deleted interface f5288ad5-2e52-4994-bfa9-ff2e77f3fe10; detaching it from the instance and deleting it from the info cache [ 1278.129950] env[68233]: DEBUG nova.network.neutron [req-dc7879a9-085a-4ac9-82d2-8d3d63a26dec req-013876dd-c18c-4c4d-a6aa-79fe06ec5056 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.160298] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc7b7602-f31a-41cd-8139-e03059f0da66 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.166800] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09cf651f-2f76-4afc-b224-61e922d24110 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.180454] env[68233]: DEBUG nova.compute.provider_tree [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1278.199171] env[68233]: DEBUG nova.network.neutron [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1278.632864] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8519a9ee-b566-47db-96c5-82fbc29ad52d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.642452] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133ef730-9536-452f-98eb-a9960fb602ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.668167] env[68233]: DEBUG nova.compute.manager [req-dc7879a9-085a-4ac9-82d2-8d3d63a26dec req-013876dd-c18c-4c4d-a6aa-79fe06ec5056 service nova] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Detach interface failed, port_id=f5288ad5-2e52-4994-bfa9-ff2e77f3fe10, reason: Instance 81e0800d-7731-433c-9238-b4aa07a4ddda could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1278.683676] env[68233]: DEBUG nova.scheduler.client.report [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1278.701441] env[68233]: INFO nova.compute.manager [-] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Took 1.30 seconds to deallocate network for instance. [ 1279.188961] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.190395] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.600s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.190632] env[68233]: DEBUG nova.objects.instance [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lazy-loading 'resources' on Instance uuid f24af50e-90cd-4398-84d1-a1e1849d01d6 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.207757] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.208985] env[68233]: INFO nova.scheduler.client.report [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted allocations for instance 916774a9-bfd3-4931-bc3a-1d50471a1c40 [ 1279.715640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-36261ab4-a31e-4d55-873a-e7b6881fc8bb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "916774a9-bfd3-4931-bc3a-1d50471a1c40" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.786s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.773569] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3a0bdf9-8c06-451f-bb16-09fd583d76b1 
{{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.781208] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e45d85c-58bb-478d-9070-9cb0823baab5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.810460] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4cc0ba-6211-49ab-9b02-8b4d66513514 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.817363] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96421f7e-551f-4a2f-8427-497a68bb29d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.830054] env[68233]: DEBUG nova.compute.provider_tree [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.333609] env[68233]: DEBUG nova.scheduler.client.report [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1280.811054] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.811425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.811514] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.811704] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.811882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.814108] env[68233]: INFO nova.compute.manager [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Terminating instance [ 1280.839275] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.649s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1280.841363] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.634s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1280.841587] env[68233]: DEBUG nova.objects.instance [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'resources' on Instance uuid 81e0800d-7731-433c-9238-b4aa07a4ddda {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1280.860611] env[68233]: INFO nova.scheduler.client.report [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Deleted allocations for instance f24af50e-90cd-4398-84d1-a1e1849d01d6 [ 1280.914027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1280.914027] env[68233]: DEBUG oslo_concurrency.lockutils [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.318449] env[68233]: DEBUG nova.compute.manager [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1281.318740] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1281.319653] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbbc4571-0532-4e8c-87f2-7b0720706751 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.327867] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1281.328122] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d02264a1-fbbd-46cb-af46-6297441af0eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.334613] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1281.334613] env[68233]: value = "task-2783355" [ 1281.334613] env[68233]: _type = "Task" [ 1281.334613] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.342119] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.368914] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2be28c45-e357-4781-89e8-d4bc6a30fd6e tempest-ServerActionsTestJSON-1177274884 tempest-ServerActionsTestJSON-1177274884-project-member] Lock "f24af50e-90cd-4398-84d1-a1e1849d01d6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.570s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.415897] env[68233]: INFO nova.compute.manager [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Detaching volume ed873c31-cc71-4640-9c21-d65eb646b508 [ 1281.419809] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752308fe-e826-4ea6-a24c-7d3f75ebf0b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.429731] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c41379c-953e-4a51-9c74-8dbac776d4eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.463233] env[68233]: INFO nova.virt.block_device [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Attempting to driver detach volume ed873c31-cc71-4640-9c21-d65eb646b508 from mountpoint /dev/sdb [ 1281.463563] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1281.463815] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559540', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'name': 'volume-ed873c31-cc71-4640-9c21-d65eb646b508', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aadc7dbe-456c-4bf3-b26d-bac672459fb9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'serial': 'ed873c31-cc71-4640-9c21-d65eb646b508'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1281.464735] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ded807-c71b-4982-83c2-62e5c94e5e17 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.467833] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e35e6c5-98bb-416f-a2fd-3cd0a22965be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.494027] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e934b74-7d00-4de4-a6a6-a38c49030ba6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.498445] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e5f27d-b80d-413a-bfa7-c5c054dee857 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.511589] env[68233]: DEBUG nova.compute.provider_tree [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.515180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fef129-141b-4a39-8711-928df834f998 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.536565] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e6e218-6a30-4003-bac0-7f3ab97a7dde {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.551870] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] The volume has not been displaced from its original location: [datastore2] volume-ed873c31-cc71-4640-9c21-d65eb646b508/volume-ed873c31-cc71-4640-9c21-d65eb646b508.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1281.557172] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1281.557453] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70944323-f02f-415b-9486-b093ba5fa5a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.575838] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1281.575838] env[68233]: value = "task-2783356" [ 1281.575838] env[68233]: _type = "Task" [ 1281.575838] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.583330] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.844654] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783355, 'name': PowerOffVM_Task, 'duration_secs': 0.199235} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.844963] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1281.845157] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1281.845430] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f149746-02d7-4afb-86d7-bc7ca530bee1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.907836] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1281.908111] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1281.908305] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleting the datastore file [datastore2] 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1281.908557] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95fa59ed-7688-4776-9af7-01bbe537e6fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.914878] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for the task: (returnval){ [ 1281.914878] env[68233]: value = "task-2783358" [ 1281.914878] env[68233]: _type = "Task" [ 1281.914878] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1281.924103] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783358, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.016382] env[68233]: DEBUG nova.scheduler.client.report [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1282.085031] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783356, 'name': ReconfigVM_Task, 'duration_secs': 0.226294} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.085149] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1282.090049] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae962f36-4950-4b68-a3c9-19703e3472a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.103995] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1282.103995] env[68233]: value = "task-2783359" [ 1282.103995] env[68233]: _type = "Task" [ 1282.103995] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.112944] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.424707] env[68233]: DEBUG oslo_vmware.api [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Task: {'id': task-2783358, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135673} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.424975] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1282.425186] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1282.425354] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1282.425527] env[68233]: INFO nova.compute.manager [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1282.425801] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1282.426046] env[68233]: DEBUG nova.compute.manager [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1282.426117] env[68233]: DEBUG nova.network.neutron [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1282.521299] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.544326] env[68233]: INFO nova.scheduler.client.report [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocations for instance 81e0800d-7731-433c-9238-b4aa07a4ddda [ 1282.614186] env[68233]: DEBUG oslo_vmware.api [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783359, 'name': ReconfigVM_Task, 'duration_secs': 0.159195} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.614469] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559540', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'name': 'volume-ed873c31-cc71-4640-9c21-d65eb646b508', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'aadc7dbe-456c-4bf3-b26d-bac672459fb9', 'attached_at': '', 'detached_at': '', 'volume_id': 'ed873c31-cc71-4640-9c21-d65eb646b508', 'serial': 'ed873c31-cc71-4640-9c21-d65eb646b508'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1283.052918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3591bc1b-49e6-4bef-b31b-8a4165ca243f tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "81e0800d-7731-433c-9238-b4aa07a4ddda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.282s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.127450] env[68233]: DEBUG nova.compute.manager [req-e8a90ee9-3e98-4114-9374-4f79b5b44456 req-fa2074ba-beab-47c7-9e3b-b8e6e38cbf62 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Received event network-vif-deleted-b4077afe-a7b6-4653-be23-4c735d67fa05 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1283.127450] env[68233]: INFO nova.compute.manager [req-e8a90ee9-3e98-4114-9374-4f79b5b44456 req-fa2074ba-beab-47c7-9e3b-b8e6e38cbf62 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Neutron deleted interface b4077afe-a7b6-4653-be23-4c735d67fa05; detaching it from the instance and deleting it from the info cache [ 1283.127450] env[68233]: DEBUG nova.network.neutron [req-e8a90ee9-3e98-4114-9374-4f79b5b44456 req-fa2074ba-beab-47c7-9e3b-b8e6e38cbf62 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.163196] env[68233]: DEBUG nova.objects.instance [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid aadc7dbe-456c-4bf3-b26d-bac672459fb9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.610471] env[68233]: DEBUG nova.network.neutron [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.630269] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cfb901c-350d-47d4-a464-cbff3576af8f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.640309] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2553f5f2-1ca6-4a1b-bce9-6ab4135e5c93 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.668459] env[68233]: DEBUG nova.compute.manager [req-e8a90ee9-3e98-4114-9374-4f79b5b44456 req-fa2074ba-beab-47c7-9e3b-b8e6e38cbf62 service nova] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Detach interface failed, port_id=b4077afe-a7b6-4653-be23-4c735d67fa05, reason: Instance 863e15c6-caa4-47aa-902a-7be2c9538687 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1284.114109] env[68233]: INFO nova.compute.manager [-] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Took 1.69 seconds to deallocate network for instance. [ 1284.172737] env[68233]: DEBUG oslo_concurrency.lockutils [None req-28a6ff1e-7c06-4cc7-8cd5-b06498c332fb tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.259s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1284.622115] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.622444] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.622669] env[68233]: DEBUG nova.objects.instance [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lazy-loading 'resources' on Instance uuid 863e15c6-caa4-47aa-902a-7be2c9538687 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1285.187573] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.187832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.188085] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.188257] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.188425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.191299] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8145e7-6bc1-4f2c-8701-30745cfad3ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.194398] env[68233]: INFO nova.compute.manager [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Terminating instance [ 1285.200456] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943638e3-f791-4da6-977e-39b2ff2901c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.230465] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71032ea8-104c-49cb-9b42-a7204cf4476a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.237474] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d182593a-a477-4119-97ae-859081aaaa6f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.252253] env[68233]: DEBUG nova.compute.provider_tree [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.514184] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.514457] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.514658] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.514837] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.515013] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.517506] env[68233]: INFO nova.compute.manager [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Terminating instance [ 1285.698843] env[68233]: DEBUG nova.compute.manager [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1285.699154] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1285.700043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-765d1efa-1f79-4513-95e8-4ab37af9641c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.707665] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1285.707909] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1abd0988-93de-4b47-b27a-2252628b9188 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.714256] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1285.714256] env[68233]: value = "task-2783361" [ 1285.714256] env[68233]: _type = "Task" [ 1285.714256] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.721612] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783361, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.773725] env[68233]: ERROR nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] [req-af5a0daf-cee3-4ab3-ae9c-305bb270bd4b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 51aa13e7-0977-4031-b209-4ae90c83752c. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-af5a0daf-cee3-4ab3-ae9c-305bb270bd4b"}]} [ 1285.790524] env[68233]: DEBUG nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1285.803887] env[68233]: DEBUG nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1285.804175] env[68233]: DEBUG nova.compute.provider_tree [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 175, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1285.817764] env[68233]: DEBUG nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1285.840064] env[68233]: DEBUG nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1285.906453] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf2e9c8-e613-491f-9164-25c02d857a94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.914700] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2775cbfa-2039-45aa-9b96-a41db174a510 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.944243] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9297c89a-126c-43f6-a6a4-578edfbe3bce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.951353] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2df658ef-5e29-4abc-b58e-c45e2171b5a0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.965830] env[68233]: DEBUG nova.compute.provider_tree [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1286.021141] env[68233]: DEBUG nova.compute.manager [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1286.021350] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1286.022105] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd9c825-a0e7-4e49-a195-9592bfce47c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.029319] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1286.029542] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-819a5a81-fbdf-4b5c-a59b-2a903e150e2b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.035438] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1286.035438] env[68233]: value = "task-2783362" [ 1286.035438] env[68233]: _type = "Task" [ 1286.035438] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.042735] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783362, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.223564] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783361, 'name': PowerOffVM_Task, 'duration_secs': 0.166864} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.223877] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.224066] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1286.224314] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9dce3f0-2888-4a27-bb4b-b58664a0e7d1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.291743] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1286.292014] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1286.292228] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleting the datastore file [datastore2] aadc7dbe-456c-4bf3-b26d-bac672459fb9 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.292491] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6248c66e-5a31-486f-891e-82b1ee83fb27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.298948] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1286.298948] env[68233]: value = "task-2783364" [ 1286.298948] 
env[68233]: _type = "Task" [ 1286.298948] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.306912] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.496314] env[68233]: DEBUG nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updated inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1286.496563] env[68233]: DEBUG nova.compute.provider_tree [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating resource provider 51aa13e7-0977-4031-b209-4ae90c83752c generation from 176 to 177 during operation: update_inventory {{(pid=68233) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1286.496770] env[68233]: DEBUG nova.compute.provider_tree [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1286.545870] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783362, 'name': PowerOffVM_Task, 'duration_secs': 0.17672} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.546107] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.546282] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1286.546525] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdc64f7a-9fa9-44a0-adba-44e1327f70a9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.608555] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1286.608772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Deleting contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1286.608954] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleting the datastore file [datastore1] dd59cab5-3f9a-42cc-93f1-75cea940acdd {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1286.609233] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c028623-06aa-4d10-96ea-3390b1456d50 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.616114] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for the task: (returnval){ [ 1286.616114] env[68233]: value = "task-2783366" [ 1286.616114] env[68233]: _type = "Task" [ 1286.616114] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.624401] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783366, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.809585] env[68233]: DEBUG oslo_vmware.api [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122122} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.809766] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1286.809949] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1286.810166] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1286.810340] env[68233]: INFO nova.compute.manager [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1286.810577] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1286.810766] env[68233]: DEBUG nova.compute.manager [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1286.810859] env[68233]: DEBUG nova.network.neutron [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1287.001785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.379s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.023491] env[68233]: INFO nova.scheduler.client.report [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Deleted allocations for instance 863e15c6-caa4-47aa-902a-7be2c9538687 [ 1287.126574] env[68233]: DEBUG oslo_vmware.api [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Task: {'id': task-2783366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187039} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.126846] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1287.128183] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Deleted contents of the VM from datastore datastore1 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1287.128183] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1287.128183] env[68233]: INFO nova.compute.manager [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1287.128183] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1287.128393] env[68233]: DEBUG nova.compute.manager [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1287.128496] env[68233]: DEBUG nova.network.neutron [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1287.280689] env[68233]: DEBUG nova.compute.manager [req-087a06b0-45ea-4c8b-ae94-797c3ab7e66c req-9f9bd55e-e223-4661-80a1-345a0abd129c service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Received event network-vif-deleted-85aae8a7-b4dc-4227-8b64-2e08fa7fa580 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1287.281138] env[68233]: INFO nova.compute.manager [req-087a06b0-45ea-4c8b-ae94-797c3ab7e66c req-9f9bd55e-e223-4661-80a1-345a0abd129c service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Neutron deleted interface 85aae8a7-b4dc-4227-8b64-2e08fa7fa580; detaching it from the instance and deleting it from the info cache [ 1287.281138] env[68233]: DEBUG nova.network.neutron [req-087a06b0-45ea-4c8b-ae94-797c3ab7e66c req-9f9bd55e-e223-4661-80a1-345a0abd129c service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.385672] env[68233]: DEBUG nova.compute.manager [req-bfb62ea7-e182-48c8-9e4d-3f561e9c779d req-2445d9e8-2424-483b-8235-c6e5a11e2299 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Received event network-vif-deleted-f8c8623e-abba-4da9-8ab2-20413bb09889 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1287.385981] env[68233]: INFO nova.compute.manager [req-bfb62ea7-e182-48c8-9e4d-3f561e9c779d req-2445d9e8-2424-483b-8235-c6e5a11e2299 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Neutron deleted interface f8c8623e-abba-4da9-8ab2-20413bb09889; detaching it from the instance and deleting it from the info cache [ 1287.386102] env[68233]: DEBUG nova.network.neutron [req-bfb62ea7-e182-48c8-9e4d-3f561e9c779d req-2445d9e8-2424-483b-8235-c6e5a11e2299 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.535531] env[68233]: DEBUG oslo_concurrency.lockutils [None req-7b92e145-dd47-4d88-9d5c-31b4b6d5657a tempest-AttachVolumeShelveTestJSON-1725850099 tempest-AttachVolumeShelveTestJSON-1725850099-project-member] Lock "863e15c6-caa4-47aa-902a-7be2c9538687" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.724s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.757160] env[68233]: DEBUG nova.network.neutron [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.783435] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-deb2807e-cb42-4fe9-9352-c4f6a3308f15 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.792816] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df94394d-ba22-4517-b256-644e45982ae8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.817231] env[68233]: DEBUG nova.compute.manager [req-087a06b0-45ea-4c8b-ae94-797c3ab7e66c req-9f9bd55e-e223-4661-80a1-345a0abd129c service nova] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Detach interface failed, port_id=85aae8a7-b4dc-4227-8b64-2e08fa7fa580, reason: Instance aadc7dbe-456c-4bf3-b26d-bac672459fb9 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1287.869406] env[68233]: DEBUG nova.network.neutron [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1287.889026] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37e6c55e-5435-406f-a4c4-9b20d3fca8d4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.898448] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e9801e-a560-47a5-9d61-ec0fbed19fd3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.924234] env[68233]: DEBUG nova.compute.manager [req-bfb62ea7-e182-48c8-9e4d-3f561e9c779d req-2445d9e8-2424-483b-8235-c6e5a11e2299 service nova] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Detach interface failed, port_id=f8c8623e-abba-4da9-8ab2-20413bb09889, reason: Instance dd59cab5-3f9a-42cc-93f1-75cea940acdd could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1288.260976] env[68233]: INFO nova.compute.manager [-] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Took 1.45 seconds to deallocate network for instance. [ 1288.373190] env[68233]: INFO nova.compute.manager [-] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Took 1.24 seconds to deallocate network for instance. 
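The resource-tracker bookkeeping that follows (the Acquiring lock "compute_resources" / acquired / released trio) and the repeated provider-inventory dumps for 51aa13e7-0977-4031-b209-4ae90c83752c reduce to two small patterns. Below is a minimal, illustrative Python sketch of those patterns only; it is not Nova source code. The lock name and the inventory numbers are taken from the log above, while the update_usage helper itself is hypothetical.

    # Illustrative sketch only -- not Nova code. It mirrors two things visible in the
    # log: the oslo.concurrency lock messages around ResourceTracker.update_usage,
    # and the capacity arithmetic implied by the inventory dict reported for
    # provider 51aa13e7-0977-4031-b209-4ae90c83752c.
    from oslo_concurrency import lockutils

    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 176},
    }

    @lockutils.synchronized("compute_resources")   # produces Acquiring/acquired/released DEBUG lines
    def update_usage():
        # capacity = (total - reserved) * allocation_ratio is the usual Placement-style view;
        # max_unit still caps what any single instance may claim of that resource class.
        for rc, inv in INVENTORY.items():
            capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
            print(rc, capacity, "per-instance cap:", inv["max_unit"])

    if __name__ == "__main__":
        update_usage()   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

With allocation_ratio 4.0 the 48 reported VCPUs advertise as 192 schedulable units (and 196590 - 512 = 196078 MB of RAM), which is why the tracker can keep admitting new claims in the entries below even while other instances are still being torn down.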
[ 1288.768052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1288.768052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.768052] env[68233]: DEBUG nova.objects.instance [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'resources' on Instance uuid aadc7dbe-456c-4bf3-b26d-bac672459fb9 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1288.885761] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.321728] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e28672-f201-4589-9c7c-9ed86c3d7b56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.330492] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c8ede9-f696-4a2f-a891-705d386011ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.364041] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104d49c3-9d86-4bb3-ab68-058e351fcf19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.371242] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868dbce3-fe3a-4e89-afbb-05a2ab94c537 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.384234] env[68233]: DEBUG nova.compute.provider_tree [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.887338] env[68233]: DEBUG nova.scheduler.client.report [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1290.392599] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.625s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.396058] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.510s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.396058] env[68233]: DEBUG nova.objects.instance [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lazy-loading 'resources' on Instance uuid dd59cab5-3f9a-42cc-93f1-75cea940acdd {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.411368] env[68233]: INFO nova.scheduler.client.report [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted allocations for instance aadc7dbe-456c-4bf3-b26d-bac672459fb9 [ 1290.918162] env[68233]: DEBUG oslo_concurrency.lockutils [None req-13f1fce1-7119-486c-95be-782d21196d17 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "aadc7dbe-456c-4bf3-b26d-bac672459fb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.730s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.935299] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931bd98e-1410-4469-8365-98a33430a6dd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.943803] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c0965d-470b-4f3e-a6c7-e64b652914df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.974146] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfb53dd-6823-46cd-a649-76c65499cca2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.980860] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3977c5c-b74c-4805-9656-de8d28e60e65 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.993693] env[68233]: DEBUG nova.compute.provider_tree [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 
tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.497634] env[68233]: DEBUG nova.scheduler.client.report [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1292.004707] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.608s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.028686] env[68233]: INFO nova.scheduler.client.report [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Deleted allocations for instance dd59cab5-3f9a-42cc-93f1-75cea940acdd [ 1292.538400] env[68233]: DEBUG oslo_concurrency.lockutils [None req-c21796f0-67bd-4cae-a0a8-458f0eb842d1 tempest-ServerActionsTestOtherA-1945293322 tempest-ServerActionsTestOtherA-1945293322-project-member] Lock "dd59cab5-3f9a-42cc-93f1-75cea940acdd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.024s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.202370] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.202615] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.425153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.425427] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.705346] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1293.930741] env[68233]: DEBUG nova.compute.utils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1294.228169] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1294.228169] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.228735] env[68233]: INFO nova.compute.claims [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1294.435465] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.284823] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c8eab0-8916-4d85-893d-e43dff55f067 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.293197] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5df23b2-3cfc-4366-89ce-e0d1586064d8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.324162] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a810272-0ec8-454e-a413-8371c0e83749 {{(pid=68233) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.331376] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1240c0-2760-4fb7-bb49-d0d8e1e316f8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.345753] env[68233]: DEBUG nova.compute.provider_tree [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1295.503274] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.503581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.503874] env[68233]: INFO nova.compute.manager [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attaching volume ef6e8b0e-52e0-4267-b53d-0ac6bc07b773 to /dev/sdb [ 1295.540083] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57e832c-ce11-4146-a062-e7f456547fe9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.547391] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27da0d37-323f-4cf1-91f8-1b89588290ae {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.560397] env[68233]: DEBUG nova.virt.block_device [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating existing volume attachment record: 62ec580f-ce44-44bd-a197-9153788e0942 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1295.851019] env[68233]: DEBUG nova.scheduler.client.report [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1296.355680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.129s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.356273] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1296.862470] env[68233]: DEBUG nova.compute.utils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1296.863834] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1296.867830] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1296.934243] env[68233]: DEBUG nova.policy [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5879d5d831004ae3b4273284da66358d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd564a0ed01a84ffca782d1344faba070', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1297.176580] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Successfully created port: c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1297.371690] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1298.381965] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1298.407724] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1298.407971] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1298.408147] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1298.408332] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1298.408507] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1298.408630] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1298.408839] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1298.409008] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1298.409189] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1298.409353] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1298.409529] env[68233]: DEBUG nova.virt.hardware [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1298.410412] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b220244e-6dee-4ee4-bb17-981c0778d984 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.418603] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a54e54-82b6-4e5a-b3c3-7df63cf312fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.564793] env[68233]: DEBUG nova.compute.manager [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Received event network-vif-plugged-c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1298.565040] env[68233]: DEBUG oslo_concurrency.lockutils [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.565254] env[68233]: DEBUG oslo_concurrency.lockutils [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] Lock "e905567b-c78e-4c21-b134-78f444e941f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.565421] env[68233]: DEBUG oslo_concurrency.lockutils [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] Lock "e905567b-c78e-4c21-b134-78f444e941f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.565590] env[68233]: 
DEBUG nova.compute.manager [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] No waiting events found dispatching network-vif-plugged-c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1298.565776] env[68233]: WARNING nova.compute.manager [req-be701db1-dfc4-46c5-b0a4-37f88dc18844 req-a45a8ced-8fef-420f-ba82-f41b6f8f0e23 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Received unexpected event network-vif-plugged-c3822ece-d683-4d7f-a027-e064a545d92e for instance with vm_state building and task_state spawning. [ 1298.646493] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Successfully updated port: c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1298.700375] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.700706] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.149885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1299.149957] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1299.150131] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1299.203390] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1299.686517] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1299.726640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1299.726958] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.728697] env[68233]: INFO nova.compute.claims [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1299.827764] env[68233]: DEBUG nova.network.neutron [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating instance_info_cache with network_info: [{"id": "c3822ece-d683-4d7f-a027-e064a545d92e", "address": "fa:16:3e:c5:2a:38", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3822ece-d6", "ovs_interfaceid": "c3822ece-d683-4d7f-a027-e064a545d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.106326] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1300.106566] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559546', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'name': 'volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'serial': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1300.107484] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453569dc-d626-498c-8391-0c824e322ef1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.123847] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ff028d-6d2c-41da-961f-0417b1938c1d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.147448] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773/volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1300.147689] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46188941-451f-4b1d-bc6a-62e2e1a3384a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.165218] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1300.165218] env[68233]: value = "task-2783370" [ 1300.165218] env[68233]: _type = "Task" [ 1300.165218] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.172658] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783370, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.331064] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.331064] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Instance network_info: |[{"id": "c3822ece-d683-4d7f-a027-e064a545d92e", "address": "fa:16:3e:c5:2a:38", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3822ece-d6", "ovs_interfaceid": "c3822ece-d683-4d7f-a027-e064a545d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1300.331378] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c5:2a:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3822ece-d683-4d7f-a027-e064a545d92e', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1300.338604] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1300.338824] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1300.339063] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da3393e5-31a8-472f-a625-b89169e84cf0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.361068] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1300.361068] env[68233]: value = "task-2783371" [ 1300.361068] env[68233]: _type = "Task" [ 1300.361068] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.370405] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783371, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.595214] env[68233]: DEBUG nova.compute.manager [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Received event network-changed-c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1300.595339] env[68233]: DEBUG nova.compute.manager [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Refreshing instance network info cache due to event network-changed-c3822ece-d683-4d7f-a027-e064a545d92e. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1300.595559] env[68233]: DEBUG oslo_concurrency.lockutils [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] Acquiring lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.595707] env[68233]: DEBUG oslo_concurrency.lockutils [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] Acquired lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.595956] env[68233]: DEBUG nova.network.neutron [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Refreshing network info cache for port c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1300.675531] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783370, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1300.804393] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae79b4c-a1b1-4d17-b7b2-bfde7ae233aa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.814527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f730488-7093-46d4-9990-6243df57cbb6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.866121] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7125f3-2d2d-484a-a1a1-cd17e4356c8a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.876389] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783371, 'name': CreateVM_Task, 'duration_secs': 0.315864} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.876389] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1300.876980] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e91bd14-1da4-4aed-afc4-2dc787f750e6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.881149] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.881321] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.881640] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1300.881897] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fcb57c4-e1e9-4c67-a1dd-e633fba60cc8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.895363] env[68233]: DEBUG nova.compute.provider_tree [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1300.898385] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1300.898385] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234b876-8484-1bc5-7495-6d85e6d34e5b" [ 1300.898385] env[68233]: _type = "Task" [ 1300.898385] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.906420] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5234b876-8484-1bc5-7495-6d85e6d34e5b, 'name': SearchDatastore_Task, 'duration_secs': 0.012462} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.907344] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1300.907573] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1300.907801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1300.907947] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1300.908136] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1300.908638] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-167d7385-733f-49ec-8707-236d9a4a7928 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.916766] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 
tempest-AttachVolumeNegativeTest-320001655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1300.916946] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1300.917658] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fbb92a8-e70d-4c16-85b0-df16c4c05a1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.922468] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1300.922468] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c12897-042d-7470-92b2-4570ecbe8fda" [ 1300.922468] env[68233]: _type = "Task" [ 1300.922468] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1300.930461] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c12897-042d-7470-92b2-4570ecbe8fda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.176248] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783370, 'name': ReconfigVM_Task, 'duration_secs': 0.512687} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.176532] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfigured VM instance instance-0000007b to attach disk [datastore2] volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773/volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1301.183230] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca981248-2036-47e2-8ec3-602db62b2562 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.199110] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1301.199110] env[68233]: value = "task-2783372" [ 1301.199110] env[68233]: _type = "Task" [ 1301.199110] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.208471] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783372, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.318147] env[68233]: DEBUG nova.network.neutron [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updated VIF entry in instance network info cache for port c3822ece-d683-4d7f-a027-e064a545d92e. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1301.318548] env[68233]: DEBUG nova.network.neutron [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating instance_info_cache with network_info: [{"id": "c3822ece-d683-4d7f-a027-e064a545d92e", "address": "fa:16:3e:c5:2a:38", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3822ece-d6", "ovs_interfaceid": "c3822ece-d683-4d7f-a027-e064a545d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.399823] env[68233]: DEBUG nova.scheduler.client.report [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1301.433516] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52c12897-042d-7470-92b2-4570ecbe8fda, 'name': SearchDatastore_Task, 'duration_secs': 0.009341} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.434378] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ddabc12-ecb2-447c-babf-5b352c1d3eeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.440465] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1301.440465] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520bcccc-00ce-45a5-a298-b6a9c1fb4b21" [ 1301.440465] env[68233]: _type = "Task" [ 1301.440465] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.448105] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520bcccc-00ce-45a5-a298-b6a9c1fb4b21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.709848] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783372, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.821920] env[68233]: DEBUG oslo_concurrency.lockutils [req-ce374579-6cfa-4a44-b895-e351a81511cd req-69596153-96c9-4fed-9cd0-302724fd5d2a service nova] Releasing lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.904365] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.177s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.904935] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1301.950768] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]520bcccc-00ce-45a5-a298-b6a9c1fb4b21, 'name': SearchDatastore_Task, 'duration_secs': 0.017936} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1301.951023] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1301.951298] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e905567b-c78e-4c21-b134-78f444e941f4/e905567b-c78e-4c21-b134-78f444e941f4.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1301.951544] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12f625e6-9492-4592-87ed-e1f19fe25d8f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.957836] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1301.957836] env[68233]: value = "task-2783373" [ 1301.957836] env[68233]: _type = "Task" [ 1301.957836] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.965150] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783373, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.211753] env[68233]: DEBUG oslo_vmware.api [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783372, 'name': ReconfigVM_Task, 'duration_secs': 0.566734} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.212117] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559546', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'name': 'volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'serial': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1302.410725] env[68233]: DEBUG nova.compute.utils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1302.414421] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1302.414755] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1302.467228] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.41571} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.468544] env[68233]: DEBUG nova.policy [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e896b81a32439ebc3db2aacd4d8967', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb98b234b87a4120ad06095426f74ce0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1302.469959] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] e905567b-c78e-4c21-b134-78f444e941f4/e905567b-c78e-4c21-b134-78f444e941f4.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1302.470191] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1302.470434] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7dbe7ce-d6d2-47d4-9246-ec5131846e2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.476597] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1302.476597] env[68233]: value = "task-2783374" [ 1302.476597] env[68233]: _type = "Task" [ 1302.476597] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.484371] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783374, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.740802] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Successfully created port: 46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1302.918437] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1302.987096] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068014} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.987382] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1302.988153] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481f5392-12db-432a-bd6f-d6ea1aa160af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.010573] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] e905567b-c78e-4c21-b134-78f444e941f4/e905567b-c78e-4c21-b134-78f444e941f4.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1303.010817] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b8c0d09-a31f-4dd1-845f-68dc6992d1bb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.032340] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1303.032340] env[68233]: value = "task-2783375" [ 1303.032340] env[68233]: _type = "Task" [ 1303.032340] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.043033] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783375, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.247438] env[68233]: DEBUG nova.objects.instance [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1303.545234] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783375, 'name': ReconfigVM_Task, 'duration_secs': 0.280919} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.545727] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfigured VM instance instance-0000007c to attach disk [datastore2] e905567b-c78e-4c21-b134-78f444e941f4/e905567b-c78e-4c21-b134-78f444e941f4.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1303.546651] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-be30163d-65f2-4698-90c9-3703e6b5f437 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.553853] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1303.553853] env[68233]: value = "task-2783376" [ 1303.553853] env[68233]: _type = "Task" [ 1303.553853] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.561594] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783376, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1303.752841] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d1bc6a1e-05b5-477d-b712-ec4b876b5372 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.249s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.928011] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1303.953586] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1303.953867] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1303.954050] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1303.954245] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1303.954392] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1303.954537] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1303.954743] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1303.954901] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1303.955090] env[68233]: DEBUG 
nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1303.955257] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1303.955449] env[68233]: DEBUG nova.virt.hardware [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1303.956331] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcff248-245f-4023-ab95-92ff466ed691 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.963932] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14946ddc-241a-436f-a6d2-786a918308b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.062673] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783376, 'name': Rename_Task, 'duration_secs': 0.279448} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.062930] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1304.063385] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73bec58a-dff3-4b40-92e0-6a11a190d000 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.068931] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1304.068931] env[68233]: value = "task-2783377" [ 1304.068931] env[68233]: _type = "Task" [ 1304.068931] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.077984] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783377, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.090901] env[68233]: DEBUG nova.compute.manager [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Received event network-vif-plugged-46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1304.091186] env[68233]: DEBUG oslo_concurrency.lockutils [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.091458] env[68233]: DEBUG oslo_concurrency.lockutils [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.091741] env[68233]: DEBUG oslo_concurrency.lockutils [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.091917] env[68233]: DEBUG nova.compute.manager [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] No waiting events found dispatching network-vif-plugged-46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1304.092309] env[68233]: WARNING nova.compute.manager [req-46797974-0511-4a7e-a44d-b48eca73d856 req-8db697eb-8771-459d-b82b-ff686ba42182 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Received unexpected event network-vif-plugged-46866dde-e539-4c4c-9367-f7bc868ce8da for instance with vm_state building and task_state spawning. [ 1304.172634] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Successfully updated port: 46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.579870] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783377, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.657305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.657831] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.673814] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.673814] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.673972] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.079999] env[68233]: DEBUG oslo_vmware.api [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783377, 'name': PowerOnVM_Task, 'duration_secs': 0.617911} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.080388] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1305.080492] env[68233]: INFO nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Took 6.70 seconds to spawn the instance on the hypervisor. 
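The PowerOnVM_Task exchange above, like the earlier ReconfigVM_Task, CreateVM_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries, follows the same vSphere pattern: invoke a *_Task method, then poll the returned Task object until it finishes (the recurring "Waiting for the task ...", "progress is N%" and "duration_secs ... completed successfully" lines). A minimal illustrative sketch of that pattern, assuming pyVmomi rather than the oslo.vmware session wrapper that actually produced this log; the vCenter host and credentials are placeholders, and the VM name is only borrowed from the log for flavor:

    # Sketch only: the invoke-*_Task-then-poll pattern behind the log's
    # "Waiting for the task ..." / "progress is N%" / "completed successfully" lines.
    # Host and credentials are placeholders.
    import ssl
    import time

    from pyVim.connect import SmartConnect, Disconnect
    from pyVmomi import vim

    ctx = ssl._create_unverified_context()    # devstack-style lab setup; no cert verification
    si = SmartConnect(host='vcenter.example.test', user='user@vsphere.local',
                      pwd='secret', sslContext=ctx)
    try:
        content = si.RetrieveContent()
        view = content.viewManager.CreateContainerView(
            content.rootFolder, [vim.VirtualMachine], True)
        vm = next(v for v in view.view if v.name == 'instance-0000007c')
        view.DestroyView()

        task = vm.PowerOnVM_Task()             # returns a vim.Task ("task-..." in the log)
        while task.info.state in (vim.TaskInfo.State.queued,
                                  vim.TaskInfo.State.running):
            print('PowerOnVM_Task progress:', task.info.progress)
            time.sleep(1)
        if task.info.state == vim.TaskInfo.State.error:
            raise task.info.error              # pyVmomi faults are Exception subclasses
    finally:
        Disconnect(si)

pyVim.task.WaitForTask(task) wraps the same polling loop; in this log the equivalent role is played by oslo_vmware.api's wait_for_task/_poll_task, which is what emits the progress lines above.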
[ 1305.080676] env[68233]: DEBUG nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1305.081449] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fb000d6-6967-4096-862b-43d106ace83d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.161299] env[68233]: DEBUG nova.compute.utils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1305.205999] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Instance cache missing network info. {{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.328767] env[68233]: DEBUG nova.network.neutron [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.597480] env[68233]: INFO nova.compute.manager [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Took 11.39 seconds to build instance. 
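The recurring "Reconfiguring VM instance ... to attach disk [datastore2] ... with type thin" / "Reconfigured VM instance ..." pairs (task-2783370, task-2783372, task-2783375) are ReconfigVM_Task calls whose config spec adds a VirtualDisk device backed by an existing VMDK. A minimal sketch of such a spec, again assuming pyVmomi; `vm`, the controller key, unit number and negative device key are illustrative placeholders rather than values taken from this log:

    # Sketch only: a ReconfigVM_Task device-change spec that attaches an existing,
    # thin-provisioned VMDK, mirroring the "attach disk ... with type thin" entries.
    from pyVmomi import vim

    def attach_existing_vmdk(vm, vmdk_path, controller_key=1000, unit_number=1):
        backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
        backing.fileName = vmdk_path           # e.g. '[datastore2] volume-.../volume-....vmdk'
        backing.diskMode = 'persistent'
        backing.thinProvisioned = True         # "with type thin"

        disk = vim.vm.device.VirtualDisk()
        disk.key = -101                        # negative key marks a device new to this spec
        disk.controllerKey = controller_key    # an existing SCSI controller on the VM
        disk.unitNumber = unit_number
        disk.backing = backing

        dev_spec = vim.vm.device.VirtualDeviceSpec()
        dev_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
        dev_spec.device = disk                 # no fileOperation: the VMDK already exists

        spec = vim.vm.ConfigSpec(deviceChange=[dev_spec])
        return vm.ReconfigVM_Task(spec=spec)   # then poll/wait as in the sketch above

In the log itself the spec is built by nova.virt.vmwareapi (volumeops.attach_disk_to_vm / vm_util, per the file:line references) and submitted through the oslo.vmware session; the sketch only illustrates the general shape such a device-change spec takes.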
[ 1305.664211] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.831451] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.831780] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Instance network_info: |[{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1305.832228] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:1f:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46866dde-e539-4c4c-9367-f7bc868ce8da', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1305.839581] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating folder: Project (eb98b234b87a4120ad06095426f74ce0). Parent ref: group-v559223. 
{{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1305.839844] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0bee003-e629-46cd-8ea0-fbc79ee52e74 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.850605] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created folder: Project (eb98b234b87a4120ad06095426f74ce0) in parent group-v559223. [ 1305.850747] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating folder: Instances. Parent ref: group-v559548. {{(pid=68233) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1305.850964] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9fac883e-b261-4c70-b1ae-93ed4dd23404 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.859299] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created folder: Instances in parent group-v559548. [ 1305.859524] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1305.859702] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1305.859888] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba7e903c-1b89-4b01-a0c9-d90c428f5251 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.879599] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1305.879599] env[68233]: value = "task-2783380" [ 1305.879599] env[68233]: _type = "Task" [ 1305.879599] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.886507] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783380, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.100044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eeae3f72-139c-4f9c-a19c-df36c6eb9d87 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.897s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.117409] env[68233]: DEBUG nova.compute.manager [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Received event network-changed-46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1306.117631] env[68233]: DEBUG nova.compute.manager [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Refreshing instance network info cache due to event network-changed-46866dde-e539-4c4c-9367-f7bc868ce8da. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1306.117843] env[68233]: DEBUG oslo_concurrency.lockutils [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.117989] env[68233]: DEBUG oslo_concurrency.lockutils [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.118290] env[68233]: DEBUG nova.network.neutron [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Refreshing network info cache for port 46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1306.389604] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783380, 'name': CreateVM_Task, 'duration_secs': 0.306597} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.389804] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.390469] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.390633] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.390953] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1306.391217] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83a25cba-bf9d-4f09-993c-12af8f8e3eba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.395560] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1306.395560] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5230cb0a-63ba-190d-f60e-51db1808cf91" [ 1306.395560] env[68233]: _type = "Task" [ 1306.395560] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.403200] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5230cb0a-63ba-190d-f60e-51db1808cf91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.721932] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1306.722190] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1306.722430] env[68233]: INFO nova.compute.manager [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attaching volume 284c0291-140c-4c8d-b14c-ada441c2c537 to /dev/sdc [ 1306.760638] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bb308c-c742-473b-83fd-9ad635f3880b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.770830] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14db66f3-d666-4c63-b312-7d013a11da02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.783587] env[68233]: DEBUG nova.virt.block_device [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating existing volume attachment record: 5814b9ff-18fe-4103-b4c1-f7e3d5ee5fdb {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1306.852913] env[68233]: DEBUG nova.network.neutron [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updated VIF entry in instance network info cache for port 46866dde-e539-4c4c-9367-f7bc868ce8da. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1306.853309] env[68233]: DEBUG nova.network.neutron [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.906124] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5230cb0a-63ba-190d-f60e-51db1808cf91, 'name': SearchDatastore_Task, 'duration_secs': 0.010254} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.906427] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.906652] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1306.906885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.907072] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.907264] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1306.907515] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5f83b1d-a7b6-42d1-97c0-ef7cbfa82dd2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.916162] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1306.916347] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1306.917043] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0e2a148-b774-45be-93b9-3125bf449004 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.922176] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1306.922176] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5280c564-7e52-ff9f-8002-9a79e5cf7b90" [ 1306.922176] env[68233]: _type = "Task" [ 1306.922176] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.935382] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5280c564-7e52-ff9f-8002-9a79e5cf7b90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.356129] env[68233]: DEBUG oslo_concurrency.lockutils [req-71d5920c-d2dc-4a1d-8d50-e7362a9faf34 req-60d23c4d-9bbd-4b4c-87b6-c72ccdf6efb2 service nova] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.432651] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5280c564-7e52-ff9f-8002-9a79e5cf7b90, 'name': SearchDatastore_Task, 'duration_secs': 0.009491} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.433435] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9633800b-e227-4550-8ac0-0fcd0024fc91 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.438311] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1307.438311] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa3a92-9268-4ec8-669f-d20762a2e61d" [ 1307.438311] env[68233]: _type = "Task" [ 1307.438311] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.445702] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa3a92-9268-4ec8-669f-d20762a2e61d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.949879] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52aa3a92-9268-4ec8-669f-d20762a2e61d, 'name': SearchDatastore_Task, 'duration_secs': 0.008591} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.950092] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.950342] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1307.950591] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a80577b1-faa8-4873-b5c7-57c5d21df0c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.956449] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1307.956449] env[68233]: value = "task-2783382" [ 1307.956449] env[68233]: _type = "Task" [ 1307.956449] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.963465] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783382, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.152518] env[68233]: DEBUG nova.compute.manager [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Received event network-changed-c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1308.152727] env[68233]: DEBUG nova.compute.manager [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Refreshing instance network info cache due to event network-changed-c3822ece-d683-4d7f-a027-e064a545d92e. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1308.152995] env[68233]: DEBUG oslo_concurrency.lockutils [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] Acquiring lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.153169] env[68233]: DEBUG oslo_concurrency.lockutils [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] Acquired lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1308.153381] env[68233]: DEBUG nova.network.neutron [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Refreshing network info cache for port c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1308.465897] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783382, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.41606} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.467644] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1308.467644] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1308.467644] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27770fea-8c46-4449-851d-21c24125a981 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.473572] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1308.473572] env[68233]: value = "task-2783383" [ 1308.473572] env[68233]: _type = "Task" [ 1308.473572] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.480702] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783383, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.836664] env[68233]: DEBUG nova.network.neutron [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updated VIF entry in instance network info cache for port c3822ece-d683-4d7f-a027-e064a545d92e. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1308.837039] env[68233]: DEBUG nova.network.neutron [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating instance_info_cache with network_info: [{"id": "c3822ece-d683-4d7f-a027-e064a545d92e", "address": "fa:16:3e:c5:2a:38", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.132", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3822ece-d6", "ovs_interfaceid": "c3822ece-d683-4d7f-a027-e064a545d92e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1308.986728] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783383, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062827} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.986728] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1308.986728] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eef0d3f-7ab7-49bb-b62f-bc828f52276a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.005039] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1309.005328] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd2d7c75-28b8-4b69-887f-3ebf99f79843 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.023299] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1309.023299] env[68233]: value = "task-2783385" [ 1309.023299] env[68233]: _type = "Task" [ 1309.023299] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.031929] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783385, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.340218] env[68233]: DEBUG oslo_concurrency.lockutils [req-79d6d0b9-9c0b-4a59-8508-6eda248d8ef4 req-07e11cd4-065c-4c5e-9eaa-41c30a7db6f5 service nova] Releasing lock "refresh_cache-e905567b-c78e-4c21-b134-78f444e941f4" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1309.533887] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783385, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.034286] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783385, 'name': ReconfigVM_Task, 'duration_secs': 0.919047} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.034494] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1310.034966] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17d22972-4549-4f51-8e16-5e031f5ae995 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.040875] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1310.040875] env[68233]: value = "task-2783386" [ 1310.040875] env[68233]: _type = "Task" [ 1310.040875] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.048134] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783386, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.551269] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783386, 'name': Rename_Task, 'duration_secs': 0.135156} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.551650] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1310.551773] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0969509-66ec-4753-9912-80cb59d60888 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.558962] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1310.558962] env[68233]: value = "task-2783387" [ 1310.558962] env[68233]: _type = "Task" [ 1310.558962] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.566983] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783387, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.068804] env[68233]: DEBUG oslo_vmware.api [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783387, 'name': PowerOnVM_Task, 'duration_secs': 0.432304} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.069085] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1311.069295] env[68233]: INFO nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 7.14 seconds to spawn the instance on the hypervisor. [ 1311.069473] env[68233]: DEBUG nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1311.070228] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222e3a99-6f0d-40c5-a972-0b344d4347a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.336055] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1311.336055] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559551', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'name': 'volume-284c0291-140c-4c8d-b14c-ada441c2c537', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'serial': '284c0291-140c-4c8d-b14c-ada441c2c537'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1311.336055] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c22650f7-be86-441c-bc9d-0bbb704f8e17 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.352248] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ecc3a6-3602-4cce-a63c-92a4a61f329a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.379846] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfiguring VM instance instance-0000007b to attach disk [datastore2] volume-284c0291-140c-4c8d-b14c-ada441c2c537/volume-284c0291-140c-4c8d-b14c-ada441c2c537.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1311.380440] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ccd9e11-42ca-4034-a4ea-adaabad47203 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.401016] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1311.401016] env[68233]: value = "task-2783388" [ 1311.401016] env[68233]: _type = "Task" [ 1311.401016] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.408172] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783388, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.587280] env[68233]: INFO nova.compute.manager [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 11.88 seconds to build instance. 
[ 1311.910544] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783388, 'name': ReconfigVM_Task, 'duration_secs': 0.330214} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.910544] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfigured VM instance instance-0000007b to attach disk [datastore2] volume-284c0291-140c-4c8d-b14c-ada441c2c537/volume-284c0291-140c-4c8d-b14c-ada441c2c537.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1311.916196] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2691d524-31a5-4e07-a618-0bc5cd2ffe2a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.927115] env[68233]: DEBUG nova.compute.manager [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Received event network-changed-46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1311.927394] env[68233]: DEBUG nova.compute.manager [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Refreshing instance network info cache due to event network-changed-46866dde-e539-4c4c-9367-f7bc868ce8da. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1311.927546] env[68233]: DEBUG oslo_concurrency.lockutils [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.927687] env[68233]: DEBUG oslo_concurrency.lockutils [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1311.927845] env[68233]: DEBUG nova.network.neutron [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Refreshing network info cache for port 46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1311.934668] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1311.934668] env[68233]: value = "task-2783389" [ 1311.934668] env[68233]: _type = "Task" [ 1311.934668] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.943863] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783389, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.090637] env[68233]: DEBUG oslo_concurrency.lockutils [None req-2b084920-4c22-4cd5-a430-c319cf0b1c45 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.390s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1312.446245] env[68233]: DEBUG oslo_vmware.api [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783389, 'name': ReconfigVM_Task, 'duration_secs': 0.139782} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.446537] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559551', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'name': 'volume-284c0291-140c-4c8d-b14c-ada441c2c537', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'serial': '284c0291-140c-4c8d-b14c-ada441c2c537'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1312.630668] env[68233]: DEBUG nova.network.neutron [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updated VIF entry in instance network info cache for port 46866dde-e539-4c4c-9367-f7bc868ce8da. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1312.631036] env[68233]: DEBUG nova.network.neutron [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.134081] env[68233]: DEBUG oslo_concurrency.lockutils [req-3d4f2140-66c6-4700-8d39-c24fa45ef44d req-b7be1a2d-ac41-4a8c-97ba-07e522f055ee service nova] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1313.482033] env[68233]: DEBUG nova.objects.instance [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1313.987113] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3a5ffae1-a658-483e-8c9b-3ff30c319a63 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.265s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1314.276627] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1314.276844] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1314.780168] env[68233]: INFO nova.compute.manager [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Detaching volume ef6e8b0e-52e0-4267-b53d-0ac6bc07b773 [ 1314.809607] env[68233]: INFO nova.virt.block_device [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attempting to driver detach volume ef6e8b0e-52e0-4267-b53d-0ac6bc07b773 from mountpoint /dev/sdb [ 1314.809847] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1314.810046] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559546', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'name': 'volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'serial': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1314.810912] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ecfec8-821e-4574-a0af-2da7b10d6208 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.834817] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67d6df7-b4db-4c45-a3c0-69a7609e3bfa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.841307] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca33043-158b-4dc7-8424-f213311305d9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.865252] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49d057d-d5fb-4f5c-b4e9-86ea40d38ab4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.879206] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] The volume has not been displaced from its original location: [datastore2] volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773/volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773.vmdk. 
No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1314.884232] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfiguring VM instance instance-0000007b to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1314.884490] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21358d7c-6c0d-4a57-8246-f21164caa0b4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.901119] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1314.901119] env[68233]: value = "task-2783390" [ 1314.901119] env[68233]: _type = "Task" [ 1314.901119] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.908248] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.410625] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783390, 'name': ReconfigVM_Task, 'duration_secs': 0.255436} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.410992] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfigured VM instance instance-0000007b to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1315.415645] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2dae5f51-650e-4bb4-9b19-c0079a1f2969 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.430255] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1315.430255] env[68233]: value = "task-2783391" [ 1315.430255] env[68233]: _type = "Task" [ 1315.430255] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.437622] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783391, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.940911] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783391, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.443330] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783391, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.940925] env[68233]: DEBUG oslo_vmware.api [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783391, 'name': ReconfigVM_Task, 'duration_secs': 1.13677} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.941231] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559546', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'name': 'volume-ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773', 'serial': 'ef6e8b0e-52e0-4267-b53d-0ac6bc07b773'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1317.481852] env[68233]: DEBUG nova.objects.instance [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1318.490616] env[68233]: DEBUG oslo_concurrency.lockutils [None req-e95857c0-fb40-4585-98d6-6096eadc8378 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.214s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1318.541532] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1318.541860] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock 
"280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1319.045395] env[68233]: INFO nova.compute.manager [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Detaching volume 284c0291-140c-4c8d-b14c-ada441c2c537 [ 1319.075079] env[68233]: INFO nova.virt.block_device [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Attempting to driver detach volume 284c0291-140c-4c8d-b14c-ada441c2c537 from mountpoint /dev/sdc [ 1319.075333] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1319.075519] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559551', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'name': 'volume-284c0291-140c-4c8d-b14c-ada441c2c537', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'serial': '284c0291-140c-4c8d-b14c-ada441c2c537'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1319.076445] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0bc837-bee1-4f04-bbd6-258866a947dc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.098940] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45e2c9-d3c7-41fc-a3ae-01c6a73b4f57 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.105542] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35411094-e50b-4da7-b33b-e247067b5e31 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.125658] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7235000e-91c6-4140-a448-494a8dbad0b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.139700] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] The volume has not been displaced from its original location: [datastore2] 
volume-284c0291-140c-4c8d-b14c-ada441c2c537/volume-284c0291-140c-4c8d-b14c-ada441c2c537.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1319.144772] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfiguring VM instance instance-0000007b to detach disk 2002 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1319.145055] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f94b49-3251-4c88-a7f2-ec98db325402 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.161636] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1319.161636] env[68233]: value = "task-2783392" [ 1319.161636] env[68233]: _type = "Task" [ 1319.161636] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.169016] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783392, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.672063] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783392, 'name': ReconfigVM_Task, 'duration_secs': 0.201601} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.672063] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Reconfigured VM instance instance-0000007b to detach disk 2002 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1319.676369] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f106ce8-dfbc-4c9f-9f76-0e10d26f913d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.691286] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1319.691286] env[68233]: value = "task-2783393" [ 1319.691286] env[68233]: _type = "Task" [ 1319.691286] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.698867] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783393, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.113661] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.113899] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.114096] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.114287] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.114491] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.114693] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.114877] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.115040] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1320.115197] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.201075] env[68233]: DEBUG oslo_vmware.api [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783393, 'name': ReconfigVM_Task, 'duration_secs': 0.147519} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.201383] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559551', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'name': 'volume-284c0291-140c-4c8d-b14c-ada441c2c537', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '280bc403-3d10-4a29-9507-c548d9cf1d1a', 'attached_at': '', 'detached_at': '', 'volume_id': '284c0291-140c-4c8d-b14c-ada441c2c537', 'serial': '284c0291-140c-4c8d-b14c-ada441c2c537'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1320.617969] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.618242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.618682] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1320.618977] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1320.619988] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b9ded6-f9ed-4d93-97d3-c3af6adf3586 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.628713] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f234d0-d9a3-4417-8e64-a6672ca80522 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.646315] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa622e-7269-411f-86b1-1edb4fb54292 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.653057] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fc0f27-758a-4121-b89e-0e5b272f77ee {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.680246] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179864MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1320.680565] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1320.680565] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1320.740412] env[68233]: DEBUG nova.objects.instance [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'flavor' on Instance uuid 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1321.706878] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 280bc403-3d10-4a29-9507-c548d9cf1d1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.707152] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance e905567b-c78e-4c21-b134-78f444e941f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.707200] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 1b70d077-7610-4524-ad72-ac68413f4620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1321.707348] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1321.707489] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1321.746704] env[68233]: DEBUG oslo_concurrency.lockutils [None req-9bce5e61-2849-4019-bfdb-596d02bd44e6 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.205s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.753640] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4400a9ce-a961-427b-a2e4-9d59c789b71e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.761177] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2759b0e-c229-4640-a4de-e271277f0258 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.791997] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf5e541-13a2-4daf-afd8-ab380240fc02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.799201] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab333f2-afb8-40ac-a615-e059c4f7f0c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.811928] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1322.315072] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1322.819866] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1322.820260] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.139s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.921953] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.922283] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.922499] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1322.922684] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1322.922855] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1322.925044] env[68233]: INFO nova.compute.manager [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Terminating instance [ 1323.428807] env[68233]: DEBUG nova.compute.manager [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1323.429058] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1323.429953] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ebcd28-17d9-41f6-8cf0-73e01ee355b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.438021] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1323.438265] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c209b032-f042-4144-b15e-4a3a06cade27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.444295] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1323.444295] env[68233]: value = "task-2783394" [ 1323.444295] env[68233]: _type = "Task" [ 1323.444295] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.451879] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.954891] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783394, 'name': PowerOffVM_Task, 'duration_secs': 0.240873} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.955242] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1323.955347] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1323.955601] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1f7edbfd-73e6-46f4-a4d1-4c648e4d3f3d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.022388] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1324.022598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1324.022777] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleting the datastore file [datastore2] 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1324.023042] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b714302-f868-4aa2-a67f-7a71ed604598 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.029235] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for the task: (returnval){ [ 1324.029235] env[68233]: value = "task-2783396" [ 1324.029235] env[68233]: _type = "Task" [ 1324.029235] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.037072] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783396, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.540496] env[68233]: DEBUG oslo_vmware.api [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Task: {'id': task-2783396, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132248} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.540735] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1324.540914] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1324.541102] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1324.541287] env[68233]: INFO nova.compute.manager [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1324.541519] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1324.541704] env[68233]: DEBUG nova.compute.manager [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1324.541797] env[68233]: DEBUG nova.network.neutron [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1324.991898] env[68233]: DEBUG nova.compute.manager [req-558b6f28-83a2-46cf-a0ce-4f4cebf0028f req-95d7e3a0-67e2-4bc7-8508-a565a9d38c5f service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Received event network-vif-deleted-fb60507e-d1d4-46ce-8f26-4219cd56b0ad {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1324.992160] env[68233]: INFO nova.compute.manager [req-558b6f28-83a2-46cf-a0ce-4f4cebf0028f req-95d7e3a0-67e2-4bc7-8508-a565a9d38c5f service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Neutron deleted interface fb60507e-d1d4-46ce-8f26-4219cd56b0ad; detaching it from the instance and deleting it from the info cache [ 1324.992160] env[68233]: DEBUG nova.network.neutron [req-558b6f28-83a2-46cf-a0ce-4f4cebf0028f req-95d7e3a0-67e2-4bc7-8508-a565a9d38c5f service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.467695] env[68233]: DEBUG nova.network.neutron [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1325.495755] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33fbf3f1-734d-4095-a0c1-be4713164b6e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.505782] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd50d924-2d7b-44f3-b51a-f23fa96b4dd8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.530951] env[68233]: DEBUG nova.compute.manager [req-558b6f28-83a2-46cf-a0ce-4f4cebf0028f req-95d7e3a0-67e2-4bc7-8508-a565a9d38c5f service nova] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Detach interface failed, port_id=fb60507e-d1d4-46ce-8f26-4219cd56b0ad, reason: Instance 280bc403-3d10-4a29-9507-c548d9cf1d1a could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1325.971119] env[68233]: INFO nova.compute.manager [-] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Took 1.43 seconds to deallocate network for instance. 
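Every vCenter operation in this trace (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateSnapshot_Task, CloneVM_Task) follows the same oslo.vmware invoke-and-poll pattern: the driver submits a task through the shared API session and then wait_for_task polls it, which is what produces the "progress is N%" and "completed successfully" entries above. A minimal illustrative sketch of that pattern, assuming placeholder vCenter endpoint/credentials and a vm_ref looked up elsewhere (this is not the exact Nova driver code path):

# Minimal sketch of the oslo.vmware invoke-and-poll pattern seen in this log.
# The host, credentials and vm_ref are placeholders, not values from this run.
from oslo_vmware import api as vmware_api

def power_off_vm(session, vm_ref):
    """Submit PowerOffVM_Task and block until vCenter reports completion."""
    # invoke_api() returns the Task managed-object reference; wait_for_task()
    # then polls it, emitting the "progress is N%" / "completed successfully"
    # debug lines shown in the surrounding entries.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)

# Creating the session performs the SessionManager.Login call.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'stack', 'secret',      # placeholder host/user/password
    api_retry_count=3, task_poll_interval=0.5)
# power_off_vm(session, vm_ref)  # vm_ref: a VirtualMachine moref obtained elsewhere

The "Acquiring lock ... / acquired ... waited / released ... held" lines that bracket each compute-manager operation come from oslo_concurrency.lockutils, which logs the wait and hold times around the locked section.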
[ 1326.476690] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1326.477118] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1326.477205] env[68233]: DEBUG nova.objects.instance [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lazy-loading 'resources' on Instance uuid 280bc403-3d10-4a29-9507-c548d9cf1d1a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1327.033431] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d276192a-2020-4470-8701-6f3ca274557b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.041451] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbff8766-848b-4ba5-95e3-7245be94edbf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.072364] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954e2466-9591-4bf1-8713-e79cdc3b435e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.080189] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa88920-017b-420c-a29f-1eed4ef84c86 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.093268] env[68233]: DEBUG nova.compute.provider_tree [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1327.596263] env[68233]: DEBUG nova.scheduler.client.report [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1328.102112] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 
tempest-AttachVolumeTestJSON-473105679-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.624s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1328.121588] env[68233]: INFO nova.scheduler.client.report [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Deleted allocations for instance 280bc403-3d10-4a29-9507-c548d9cf1d1a [ 1328.629055] env[68233]: DEBUG oslo_concurrency.lockutils [None req-638307f8-d9f2-47bb-baf4-02b1129ec2b0 tempest-AttachVolumeTestJSON-473105679 tempest-AttachVolumeTestJSON-473105679-project-member] Lock "280bc403-3d10-4a29-9507-c548d9cf1d1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.707s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1345.072295] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1345.072626] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1345.575783] env[68233]: DEBUG nova.compute.utils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1346.079394] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1347.137798] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1347.138199] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.138311] env[68233]: INFO nova.compute.manager [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Attaching volume eb392fad-ea13-421f-9764-b35b5047f625 to /dev/sdb [ 1347.167911] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d9e389-0511-4946-9bda-26cfcba5177b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.175267] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f45f836-a43c-4417-9b95-7f4dcb8cab51 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.187702] env[68233]: DEBUG nova.virt.block_device [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating existing volume attachment record: 336da9c7-cb76-4ac6-a096-49497be81022 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1350.808931] env[68233]: DEBUG nova.compute.manager [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.809881] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69067b3f-bb1e-4ec8-9839-ef3bf2a59a2f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.320620] env[68233]: INFO nova.compute.manager [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] instance snapshotting [ 1351.321267] env[68233]: DEBUG nova.objects.instance [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1351.732234] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Volume attach. 
Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1351.732477] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559552', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'name': 'volume-eb392fad-ea13-421f-9764-b35b5047f625', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e905567b-c78e-4c21-b134-78f444e941f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'serial': 'eb392fad-ea13-421f-9764-b35b5047f625'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1351.733352] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83f61a53-c0d9-4dac-9bd0-7fa0b0ee643d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.749391] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fe9e2b-788b-4ee1-aac5-1abb32e1b2a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.772381] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] volume-eb392fad-ea13-421f-9764-b35b5047f625/volume-eb392fad-ea13-421f-9764-b35b5047f625.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1351.772605] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ca25252-495a-436d-a48e-fff059a1a1e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.790266] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1351.790266] env[68233]: value = "task-2783402" [ 1351.790266] env[68233]: _type = "Task" [ 1351.790266] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.798849] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783402, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.827316] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a284dd80-ebcb-4ceb-8bbc-b57eda6e67ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.844578] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32fe853-d319-43e4-96fd-10d7ec0a08e8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.300636] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783402, 'name': ReconfigVM_Task, 'duration_secs': 0.314016} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.300899] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfigured VM instance instance-0000007c to attach disk [datastore2] volume-eb392fad-ea13-421f-9764-b35b5047f625/volume-eb392fad-ea13-421f-9764-b35b5047f625.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1352.305561] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dbb86b5b-0c38-4d74-8b1e-fbab3dbc95b9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.321715] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1352.321715] env[68233]: value = "task-2783403" [ 1352.321715] env[68233]: _type = "Task" [ 1352.321715] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.329180] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783403, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.354393] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1352.354671] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-139c35aa-d1dd-4897-bb9c-82ffb3c8b623 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.361259] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1352.361259] env[68233]: value = "task-2783404" [ 1352.361259] env[68233]: _type = "Task" [ 1352.361259] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.368642] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783404, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.831468] env[68233]: DEBUG oslo_vmware.api [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783403, 'name': ReconfigVM_Task, 'duration_secs': 0.130874} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.831868] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559552', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'name': 'volume-eb392fad-ea13-421f-9764-b35b5047f625', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e905567b-c78e-4c21-b134-78f444e941f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'serial': 'eb392fad-ea13-421f-9764-b35b5047f625'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1352.870245] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783404, 'name': CreateSnapshot_Task, 'duration_secs': 0.406944} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.870503] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1352.871243] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-babaeeff-7534-4909-86cf-0349b6e30b79 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.387457] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1353.387752] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-60a29ffa-1d57-4a12-859b-81f84db96da2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.396310] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1353.396310] env[68233]: value = "task-2783405" [ 1353.396310] env[68233]: _type = "Task" [ 1353.396310] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.406148] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783405, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.865841] env[68233]: DEBUG nova.objects.instance [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid e905567b-c78e-4c21-b134-78f444e941f4 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1353.905612] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783405, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.370351] env[68233]: DEBUG oslo_concurrency.lockutils [None req-890c5634-d5fe-4bb4-bf9c-68788a1380a3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.232s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.406121] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783405, 'name': CloneVM_Task, 'duration_secs': 0.922041} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.406414] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created linked-clone VM from snapshot [ 1354.407139] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a059f83c-761e-4961-b4a6-a05bdb33a30a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.414316] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploading image 907a38f1-81af-467f-87d9-b88813c961e6 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1354.433736] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1354.433736] env[68233]: value = "vm-559554" [ 1354.433736] env[68233]: _type = "VirtualMachine" [ 1354.433736] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1354.433969] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e6053b00-3f18-4855-a4a9-1c5fe9e755da {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.440630] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease: (returnval){ [ 1354.440630] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527eb880-cf7c-0236-061c-8b59ceb01049" [ 1354.440630] env[68233]: _type = "HttpNfcLease" [ 1354.440630] env[68233]: } obtained for exporting VM: (result){ [ 1354.440630] env[68233]: value = "vm-559554" [ 1354.440630] env[68233]: _type = "VirtualMachine" [ 1354.440630] env[68233]: }. 
{{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1354.440910] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the lease: (returnval){ [ 1354.440910] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527eb880-cf7c-0236-061c-8b59ceb01049" [ 1354.440910] env[68233]: _type = "HttpNfcLease" [ 1354.440910] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1354.446966] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1354.446966] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527eb880-cf7c-0236-061c-8b59ceb01049" [ 1354.446966] env[68233]: _type = "HttpNfcLease" [ 1354.446966] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1354.612021] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1354.612288] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1354.948576] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1354.948576] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527eb880-cf7c-0236-061c-8b59ceb01049" [ 1354.948576] env[68233]: _type = "HttpNfcLease" [ 1354.948576] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1354.949163] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1354.949163] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]527eb880-cf7c-0236-061c-8b59ceb01049" [ 1354.949163] env[68233]: _type = "HttpNfcLease" [ 1354.949163] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1354.949593] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6fa207-4adc-4302-9beb-8cea24186dc6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.956817] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk from lease info. 
{{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1354.956985] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1355.045201] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-666089cf-28b8-4adf-a41e-16914d74a130 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.115115] env[68233]: INFO nova.compute.manager [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Detaching volume eb392fad-ea13-421f-9764-b35b5047f625 [ 1355.143743] env[68233]: INFO nova.virt.block_device [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Attempting to driver detach volume eb392fad-ea13-421f-9764-b35b5047f625 from mountpoint /dev/sdb [ 1355.144031] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Volume detach. Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1355.144176] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559552', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'name': 'volume-eb392fad-ea13-421f-9764-b35b5047f625', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e905567b-c78e-4c21-b134-78f444e941f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'serial': 'eb392fad-ea13-421f-9764-b35b5047f625'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1355.145050] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc9762b-12c6-402f-8155-dad120b72643 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.165786] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d1d394-a34e-454a-b581-d3baf384ee35 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.172450] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d872e1df-c1c1-46d0-858f-1eb8dc25c28d {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.194657] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7d6529-b65e-4169-aebd-781e9cbfd2af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.208906] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] The volume has not been displaced from its original location: [datastore2] volume-eb392fad-ea13-421f-9764-b35b5047f625/volume-eb392fad-ea13-421f-9764-b35b5047f625.vmdk. No consolidation needed. {{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1355.213915] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfiguring VM instance instance-0000007c to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1355.216369] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e2f540d-aff2-40e1-8a3d-78d343ed5098 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.234824] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1355.234824] env[68233]: value = "task-2783407" [ 1355.234824] env[68233]: _type = "Task" [ 1355.234824] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.242681] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783407, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.745374] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783407, 'name': ReconfigVM_Task, 'duration_secs': 0.227707} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.745766] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Reconfigured VM instance instance-0000007c to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1355.750787] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72d49c99-652d-4c1d-b039-ce0f4f187865 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.765524] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1355.765524] env[68233]: value = "task-2783408" [ 1355.765524] env[68233]: _type = "Task" [ 1355.765524] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.773438] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783408, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.275339] env[68233]: DEBUG oslo_vmware.api [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783408, 'name': ReconfigVM_Task, 'duration_secs': 0.164962} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.275709] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559552', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'name': 'volume-eb392fad-ea13-421f-9764-b35b5047f625', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e905567b-c78e-4c21-b134-78f444e941f4', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb392fad-ea13-421f-9764-b35b5047f625', 'serial': 'eb392fad-ea13-421f-9764-b35b5047f625'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1356.817015] env[68233]: DEBUG nova.objects.instance [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid e905567b-c78e-4c21-b134-78f444e941f4 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1357.823917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-ab3e2ba9-0841-4342-8239-737d8a9104e4 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.211s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.853550] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.853917] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.853982] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "e905567b-c78e-4c21-b134-78f444e941f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.854171] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.854340] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1358.856544] env[68233]: INFO nova.compute.manager [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Terminating instance [ 1359.360273] env[68233]: DEBUG nova.compute.manager [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1359.360551] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.361449] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bc4200-06ec-4221-a98d-b6b3086387ba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.370803] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.371053] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7b80704-b527-4e36-a92b-bca1c5dd0123 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.377281] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1359.377281] env[68233]: value = "task-2783409" [ 1359.377281] env[68233]: _type = "Task" [ 1359.377281] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.385194] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783409, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.886924] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783409, 'name': PowerOffVM_Task, 'duration_secs': 0.340192} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.887294] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.887371] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.887627] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f220c202-9261-4b5f-99aa-8df193d3dac2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.492084] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1360.492084] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1360.492360] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleting the datastore file [datastore2] e905567b-c78e-4c21-b134-78f444e941f4 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1360.492474] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66ec64ee-a4dd-4eec-81de-2a162a814723 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.499161] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1360.499161] env[68233]: value = "task-2783411" [ 1360.499161] env[68233]: _type = "Task" [ 1360.499161] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.507491] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783411, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.009464] env[68233]: DEBUG oslo_vmware.api [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.376569} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.009863] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.009961] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1361.010154] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1361.010332] env[68233]: INFO nova.compute.manager [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1361.010576] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1361.010768] env[68233]: DEBUG nova.compute.manager [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1361.010864] env[68233]: DEBUG nova.network.neutron [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.476855] env[68233]: DEBUG nova.compute.manager [req-42679400-0ae2-4123-ad41-28eddde876da req-b0e4b947-2756-4adf-9340-47f0d156de04 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Received event network-vif-deleted-c3822ece-d683-4d7f-a027-e064a545d92e {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1361.477091] env[68233]: INFO nova.compute.manager [req-42679400-0ae2-4123-ad41-28eddde876da req-b0e4b947-2756-4adf-9340-47f0d156de04 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Neutron deleted interface c3822ece-d683-4d7f-a027-e064a545d92e; detaching it from the instance and deleting it from the info cache [ 1361.477298] env[68233]: DEBUG nova.network.neutron [req-42679400-0ae2-4123-ad41-28eddde876da req-b0e4b947-2756-4adf-9340-47f0d156de04 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.958601] env[68233]: DEBUG nova.network.neutron [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.980881] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-63e96225-852e-46f9-910e-c575b9ae77f5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.991280] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f01b4b29-a257-4de0-b35c-c294b87d3ceb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.017287] env[68233]: DEBUG nova.compute.manager [req-42679400-0ae2-4123-ad41-28eddde876da req-b0e4b947-2756-4adf-9340-47f0d156de04 service nova] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Detach interface failed, port_id=c3822ece-d683-4d7f-a027-e064a545d92e, reason: Instance e905567b-c78e-4c21-b134-78f444e941f4 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1362.461386] env[68233]: INFO nova.compute.manager [-] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Took 1.45 seconds to deallocate network for instance. 
[ 1362.968375] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1362.968679] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1362.968930] env[68233]: DEBUG nova.objects.instance [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'resources' on Instance uuid e905567b-c78e-4c21-b134-78f444e941f4 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.032773] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1363.033872] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fbc861-785d-40ff-bfa8-d3ab6b653b64 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.040074] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1363.040213] env[68233]: ERROR oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk due to incomplete transfer. [ 1363.040443] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-38444dcb-54d4-41aa-9dc9-5776bfa0f777 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.048336] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a553a3-8356-57c9-d093-8ce14861e8e7/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1363.048528] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploaded image 907a38f1-81af-467f-87d9-b88813c961e6 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1363.050855] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1363.051092] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fce915fe-af36-4bb6-89ac-3ba6cdeea04c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.056841] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1363.056841] env[68233]: value = "task-2783412" [ 1363.056841] env[68233]: _type = "Task" [ 1363.056841] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.066568] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783412, 'name': Destroy_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.512288] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747dffdf-79ba-4210-be14-7d1db258587c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.519771] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c346af5a-3fb6-45e9-a22d-5c737b433371 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.549197] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b2887a-ffbc-4ac8-988e-c2f6ddad81a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.556176] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24588e4f-1b84-410a-ae8c-d0d797c8a35d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.571728] env[68233]: DEBUG nova.compute.provider_tree [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.576018] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783412, 'name': Destroy_Task, 'duration_secs': 0.344778} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.576529] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroyed the VM [ 1363.576772] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1363.577009] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dc39d604-eda4-4a12-8c20-799514c17095 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.583131] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1363.583131] env[68233]: value = "task-2783413" [ 1363.583131] env[68233]: _type = "Task" [ 1363.583131] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.590531] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783413, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.077639] env[68233]: DEBUG nova.scheduler.client.report [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1364.092642] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783413, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.583106] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.614s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.595763] env[68233]: DEBUG oslo_vmware.api [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783413, 'name': RemoveSnapshot_Task, 'duration_secs': 0.78519} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.596024] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1364.596261] env[68233]: INFO nova.compute.manager [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 12.77 seconds to snapshot the instance on the hypervisor. 
[ 1364.607319] env[68233]: INFO nova.scheduler.client.report [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted allocations for instance e905567b-c78e-4c21-b134-78f444e941f4 [ 1365.115030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-de864f85-999e-4975-a751-b17c8aa728e3 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "e905567b-c78e-4c21-b134-78f444e941f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.261s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.155550] env[68233]: DEBUG nova.compute.manager [None req-8839eb39-d012-4924-a38f-2ab7fa193f77 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Found 2 images (rotation: 2) {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1365.627756] env[68233]: DEBUG nova.compute.manager [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1365.628707] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddffa948-9f03-4a71-981e-78851b1f37f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.140578] env[68233]: INFO nova.compute.manager [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] instance snapshotting [ 1366.140882] env[68233]: DEBUG nova.objects.instance [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.648020] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebdf2f45-afc6-457f-ae83-d0a1707b3056 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.666350] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13865ca8-ff58-454d-86fa-fcfe791e0127 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.860977] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.861233] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 
tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.176699] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1367.177105] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8d91a38e-03be-4ccf-872b-0905ada4292f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.185520] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1367.185520] env[68233]: value = "task-2783414" [ 1367.185520] env[68233]: _type = "Task" [ 1367.185520] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.193406] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783414, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.363259] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1367.695556] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783414, 'name': CreateSnapshot_Task, 'duration_secs': 0.446779} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.695812] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1367.696571] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba40894-e5e1-4d7d-a88b-b06f6d579633 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.884614] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1367.884894] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.886404] env[68233]: INFO nova.compute.claims [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.213045] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1368.213045] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b3ed386f-7767-4353-854c-aaa673d0d6fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.221521] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1368.221521] env[68233]: value = "task-2783415" [ 1368.221521] env[68233]: _type = "Task" [ 1368.221521] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.228887] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783415, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.732792] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783415, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.932204] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c55c51-5b5b-4885-b7d2-aa2381016a08 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.939877] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2966cf9e-24d7-4798-b8e7-35a0cb63f747 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.970472] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52e56d00-ae1f-400c-9b82-121deb06950d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.977544] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71501f52-1b78-455c-b0fa-5e3ff6cb5273 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.990617] env[68233]: DEBUG nova.compute.provider_tree [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1369.232960] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783415, 'name': CloneVM_Task, 'duration_secs': 0.914879} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.235035] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created linked-clone VM from snapshot [ 1369.235035] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3759ad6-ecfa-4832-afc4-3dd44ba91a76 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.241579] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploading image 9b97b78e-cb75-4cd7-bf66-250526a003fe {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1369.260861] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1369.260861] env[68233]: value = "vm-559556" [ 1369.260861] env[68233]: _type = "VirtualMachine" [ 1369.260861] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1369.261115] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-36ea8af1-f9e0-4ee2-81d7-c87a43f52b4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.267139] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease: (returnval){ [ 1369.267139] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d27373-afb7-0be1-5a66-b0bc9f1f7e8b" [ 1369.267139] env[68233]: _type = "HttpNfcLease" [ 1369.267139] env[68233]: } obtained for exporting VM: (result){ [ 1369.267139] env[68233]: value = "vm-559556" [ 1369.267139] env[68233]: _type = "VirtualMachine" [ 1369.267139] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1369.267423] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the lease: (returnval){ [ 1369.267423] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d27373-afb7-0be1-5a66-b0bc9f1f7e8b" [ 1369.267423] env[68233]: _type = "HttpNfcLease" [ 1369.267423] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1369.273179] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1369.273179] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d27373-afb7-0be1-5a66-b0bc9f1f7e8b" [ 1369.273179] env[68233]: _type = "HttpNfcLease" [ 1369.273179] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1369.493686] env[68233]: DEBUG nova.scheduler.client.report [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1369.775998] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1369.775998] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d27373-afb7-0be1-5a66-b0bc9f1f7e8b" [ 1369.775998] env[68233]: _type = "HttpNfcLease" [ 1369.775998] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1369.776411] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1369.776411] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d27373-afb7-0be1-5a66-b0bc9f1f7e8b" [ 1369.776411] env[68233]: _type = "HttpNfcLease" [ 1369.776411] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1369.776963] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d417ae6-8b0a-455f-a591-7e2e23462b61 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.783775] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1369.783947] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk for reading. 
{{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1369.868747] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-266b9c40-1822-4231-a511-cf365e306c03 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.998374] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.113s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.998889] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1370.504489] env[68233]: DEBUG nova.compute.utils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1370.506088] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Allocating IP information in the background. 
{{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1370.506358] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1370.569253] env[68233]: DEBUG nova.policy [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5879d5d831004ae3b4273284da66358d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd564a0ed01a84ffca782d1344faba070', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1370.836122] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Successfully created port: 0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1371.009719] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Start building block device mappings for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1372.019817] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1372.046880] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1372.047145] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1372.047402] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1372.047488] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1372.047637] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1372.047789] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1372.047995] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1372.048170] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1372.048337] env[68233]: DEBUG 
nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1372.048499] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1372.048669] env[68233]: DEBUG nova.virt.hardware [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1372.049595] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f970968-e23c-41a9-a7e5-5fd16701a276 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.057356] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54253a16-180d-4f05-91ac-d6f859185510 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.206433] env[68233]: DEBUG nova.compute.manager [req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Received event network-vif-plugged-0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1372.206822] env[68233]: DEBUG oslo_concurrency.lockutils [req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1372.207035] env[68233]: DEBUG oslo_concurrency.lockutils [req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1372.207245] env[68233]: DEBUG oslo_concurrency.lockutils [req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1372.207423] env[68233]: DEBUG nova.compute.manager [req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] No waiting events found dispatching network-vif-plugged-0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1372.207596] env[68233]: WARNING nova.compute.manager 
[req-b318eb29-6255-4dc3-8878-984fb13d583d req-329ffd7e-e63e-42ea-8e64-6cf8bf596437 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Received unexpected event network-vif-plugged-0db72888-7a72-4956-a3fc-9195096eb3f4 for instance with vm_state building and task_state spawning. [ 1372.292936] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Successfully updated port: 0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1372.795724] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.795873] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1372.796939] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1373.328050] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1373.452075] env[68233]: DEBUG nova.network.neutron [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating instance_info_cache with network_info: [{"id": "0db72888-7a72-4956-a3fc-9195096eb3f4", "address": "fa:16:3e:b7:1d:6e", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db72888-7a", "ovs_interfaceid": "0db72888-7a72-4956-a3fc-9195096eb3f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.955243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1373.955622] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Instance network_info: |[{"id": "0db72888-7a72-4956-a3fc-9195096eb3f4", "address": "fa:16:3e:b7:1d:6e", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db72888-7a", "ovs_interfaceid": "0db72888-7a72-4956-a3fc-9195096eb3f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1373.956139] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:1d:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1a9ee6f9-33be-4f58-8248-694024ec31d4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0db72888-7a72-4956-a3fc-9195096eb3f4', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1373.963837] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1373.964097] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1373.964346] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-52365ad1-cf0f-4ff1-ba77-c28d4e107309 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.985446] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1373.985446] env[68233]: value = "task-2783417" [ 1373.985446] env[68233]: _type = "Task" [ 1373.985446] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.993627] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783417, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.234381] env[68233]: DEBUG nova.compute.manager [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Received event network-changed-0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1374.234671] env[68233]: DEBUG nova.compute.manager [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Refreshing instance network info cache due to event network-changed-0db72888-7a72-4956-a3fc-9195096eb3f4. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1374.234900] env[68233]: DEBUG oslo_concurrency.lockutils [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] Acquiring lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.235243] env[68233]: DEBUG oslo_concurrency.lockutils [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] Acquired lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.235502] env[68233]: DEBUG nova.network.neutron [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Refreshing network info cache for port 0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1374.496344] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783417, 'name': CreateVM_Task, 'duration_secs': 0.367918} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.496817] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1374.497315] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.497557] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1374.497906] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1374.498524] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33b485fe-eb5a-406a-951c-dcfd73eed116 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.503207] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1374.503207] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523fae12-97e6-0ca1-5c6f-a7555c6419b0" [ 1374.503207] env[68233]: _type = "Task" [ 1374.503207] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.512035] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523fae12-97e6-0ca1-5c6f-a7555c6419b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.918877] env[68233]: DEBUG nova.network.neutron [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updated VIF entry in instance network info cache for port 0db72888-7a72-4956-a3fc-9195096eb3f4. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1374.919255] env[68233]: DEBUG nova.network.neutron [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating instance_info_cache with network_info: [{"id": "0db72888-7a72-4956-a3fc-9195096eb3f4", "address": "fa:16:3e:b7:1d:6e", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db72888-7a", "ovs_interfaceid": "0db72888-7a72-4956-a3fc-9195096eb3f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.013086] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]523fae12-97e6-0ca1-5c6f-a7555c6419b0, 'name': SearchDatastore_Task, 'duration_secs': 0.012608} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.013382] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.013611] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1375.013849] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.014046] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1375.014240] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.014533] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-365d7f3c-04bf-4ff9-8c1a-f5f785a70985 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.022681] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.022814] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1375.023499] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf1f2d36-7bf7-4123-b541-2ac2cc6fd72c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.028306] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1375.028306] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52023d24-641f-a984-cd4d-d0367f5829b9" [ 1375.028306] env[68233]: _type = "Task" [ 1375.028306] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.035447] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52023d24-641f-a984-cd4d-d0367f5829b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.422483] env[68233]: DEBUG oslo_concurrency.lockutils [req-7e3bb10a-16ae-49b3-82e7-c66f42e74e12 req-19b281e0-6d6e-4004-b7ad-f7b859c1b4f2 service nova] Releasing lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1375.538976] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52023d24-641f-a984-cd4d-d0367f5829b9, 'name': SearchDatastore_Task, 'duration_secs': 0.009034} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.539747] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72a9b48a-01f2-4ee0-ac39-838516e06c7e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.544611] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1375.544611] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52760d03-d9af-f5fd-5922-5edea4c9027d" [ 1375.544611] env[68233]: _type = "Task" [ 1375.544611] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.552206] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52760d03-d9af-f5fd-5922-5edea4c9027d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.055712] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52760d03-d9af-f5fd-5922-5edea4c9027d, 'name': SearchDatastore_Task, 'duration_secs': 0.012187} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.055980] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1376.056252] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042/cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1376.056519] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e65154c-95e1-49b1-832e-0be8258cf35c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.063903] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1376.063903] env[68233]: value = "task-2783418" [ 1376.063903] env[68233]: _type = "Task" [ 1376.063903] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.072220] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.574153] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783418, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.059637] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1377.060597] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9919620-798b-4bac-8d7f-9045f8d5e46c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.066863] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1377.066992] env[68233]: ERROR oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk due to incomplete transfer. [ 1377.069763] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f50eb90f-a11a-4f91-b945-bca564e43ed2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.076479] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783418, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532143} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.077430] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042/cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1377.077702] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1377.077955] env[68233]: DEBUG oslo_vmware.rw_handles [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522cc542-6e56-957e-b9d5-310f4bf8c13a/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1377.078146] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploaded image 9b97b78e-cb75-4cd7-bf66-250526a003fe to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1377.080608] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1377.080847] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-91f8e3a4-4933-4f33-b73d-4615f481b442 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.082491] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1ceac92d-ab2b-42d1-81f3-a237478f7bfe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.088175] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1377.088175] env[68233]: value = "task-2783420" [ 1377.088175] env[68233]: _type = "Task" [ 1377.088175] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.089302] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1377.089302] env[68233]: value = "task-2783419" [ 1377.089302] env[68233]: _type = "Task" [ 1377.089302] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.099058] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783419, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.101879] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783420, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.601089] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783420, 'name': Destroy_Task, 'duration_secs': 0.384134} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.603829] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroyed the VM [ 1377.604087] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1377.604348] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094504} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.604574] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0f4f9441-57f4-4456-b231-cb39fd6b3791 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.606013] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1377.606766] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc14932-5522-4f29-953e-0b211234a6c3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.629675] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042/cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.630842] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe4445b8-7d93-483f-9f30-1fc54ca102cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.644551] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1377.644551] env[68233]: value = "task-2783421" [ 1377.644551] env[68233]: _type = "Task" [ 1377.644551] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.648764] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1377.648764] env[68233]: value = "task-2783422" [ 1377.648764] env[68233]: _type = "Task" [ 1377.648764] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.654228] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783421, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.658536] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783422, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.155042] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783421, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.159672] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783422, 'name': ReconfigVM_Task, 'duration_secs': 0.312987} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.159925] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfigured VM instance instance-0000007e to attach disk [datastore2] cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042/cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.160548] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cb27641f-9ffb-4999-a38b-df3c16da4be4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.166050] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1378.166050] env[68233]: value = "task-2783423" [ 1378.166050] env[68233]: _type = "Task" [ 1378.166050] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.173770] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783423, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.655281] env[68233]: DEBUG oslo_vmware.api [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783421, 'name': RemoveSnapshot_Task, 'duration_secs': 0.567251} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.655605] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1378.655605] env[68233]: INFO nova.compute.manager [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 12.01 seconds to snapshot the instance on the hypervisor. [ 1378.674479] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783423, 'name': Rename_Task, 'duration_secs': 0.202714} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.674722] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.674955] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a5680e9-0ef7-4e11-b849-beb4798a4c6d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.681380] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1378.681380] env[68233]: value = "task-2783424" [ 1378.681380] env[68233]: _type = "Task" [ 1378.681380] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.690149] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783424, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.190531] env[68233]: DEBUG oslo_vmware.api [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783424, 'name': PowerOnVM_Task, 'duration_secs': 0.452491} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.192027] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.192027] env[68233]: INFO nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Took 7.17 seconds to spawn the instance on the hypervisor. [ 1379.192027] env[68233]: DEBUG nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1379.192214] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415407a0-81b3-4acd-a0da-85f26aea2c5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.208898] env[68233]: DEBUG nova.compute.manager [None req-f94ff791-2024-4ace-b627-9b60d8fa30a6 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Found 2 images (rotation: 2) {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1379.714729] env[68233]: INFO nova.compute.manager [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Took 11.85 seconds to build instance. [ 1380.094542] env[68233]: DEBUG nova.compute.manager [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Received event network-changed-0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1380.094542] env[68233]: DEBUG nova.compute.manager [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Refreshing instance network info cache due to event network-changed-0db72888-7a72-4956-a3fc-9195096eb3f4. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1380.094671] env[68233]: DEBUG oslo_concurrency.lockutils [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] Acquiring lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.094725] env[68233]: DEBUG oslo_concurrency.lockutils [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] Acquired lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1380.094877] env[68233]: DEBUG nova.network.neutron [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Refreshing network info cache for port 0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1380.216712] env[68233]: DEBUG oslo_concurrency.lockutils [None req-5f18497b-01a5-4d1e-8a0b-9cbe91f7fd92 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.355s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.802198] env[68233]: DEBUG nova.network.neutron [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updated VIF entry in instance network info cache for port 0db72888-7a72-4956-a3fc-9195096eb3f4. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1380.802563] env[68233]: DEBUG nova.network.neutron [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating instance_info_cache with network_info: [{"id": "0db72888-7a72-4956-a3fc-9195096eb3f4", "address": "fa:16:3e:b7:1d:6e", "network": {"id": "aaeecb6a-9d22-4e9a-8374-346c5eaffd95", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-467131393-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d564a0ed01a84ffca782d1344faba070", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1a9ee6f9-33be-4f58-8248-694024ec31d4", "external-id": "nsx-vlan-transportzone-581", "segmentation_id": 581, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0db72888-7a", "ovs_interfaceid": "0db72888-7a72-4956-a3fc-9195096eb3f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.849808] env[68233]: DEBUG nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1380.850760] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877aa508-d407-4c58-bb27-d8ead356ce94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.304872] env[68233]: DEBUG oslo_concurrency.lockutils [req-c67b399e-8da6-41db-9811-c6bfe9f1db69 req-ec6025a4-83fe-42bf-951d-ade22c09cebf service nova] Releasing lock "refresh_cache-cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1381.361336] env[68233]: INFO nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] instance snapshotting [ 1381.361975] env[68233]: DEBUG nova.objects.instance [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1381.817268] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.817704] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1381.868455] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f419af91-e4ed-4190-9941-43b53058db31 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.888882] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182a089b-d106-481d-9f6e-9ffd43c90e75 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.323051] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323051] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323051] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323051] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323396] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323396] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.323504] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1382.323918] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1382.399458] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1382.400019] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e9a3ee89-5432-4483-a622-501d0958f25b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.407056] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1382.407056] env[68233]: value = "task-2783425" [ 1382.407056] env[68233]: _type = "Task" [ 1382.407056] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.414625] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783425, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.827030] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.827412] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.827412] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.827580] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1382.828474] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd360f5-5e45-45c3-885d-659bbda8d423 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.836310] env[68233]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5441ccca-88d6-425e-b542-6f56ad57ae94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.850791] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe42e98-392f-42ef-b4a5-34ca4075f72e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.856645] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44c2d07-69ef-42fd-99d0-aba0b7248a38 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.884645] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180801MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1382.884784] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1382.884993] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1382.915806] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783425, 'name': CreateSnapshot_Task, 'duration_secs': 0.422845} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.916054] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1382.916756] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4958487-5853-40af-a43a-8ac0a2b272b1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.433217] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1383.433753] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-289db89c-7c4d-4bbd-a2a8-2b31d903d101 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.442442] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1383.442442] env[68233]: value = "task-2783426" [ 1383.442442] env[68233]: _type = "Task" [ 1383.442442] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.450404] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783426, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.910649] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 1b70d077-7610-4524-ad72-ac68413f4620 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1383.911024] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1383.911024] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1383.911182] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1383.950904] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bda174c-dcff-4c24-9c7b-f7c1826c6782 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.959013] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783426, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.964554] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b23aad-b802-4a0e-a12d-4ae0fed353ea {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.003161] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68779ed-d2dc-4af6-9834-70ab517fc527 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.010247] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dd923b-ea15-43ca-8118-f6551fd9c6c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.024157] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.454422] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783426, 'name': CloneVM_Task, 'duration_secs': 0.928937} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.454739] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Created linked-clone VM from snapshot [ 1384.455518] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39f81671-6811-4166-b6e5-0d6e28975321 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.462837] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploading image 6f4a9d68-d159-4c90-9a76-bc47f5355524 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1384.486750] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1384.486750] env[68233]: value = "vm-559559" [ 1384.486750] env[68233]: _type = "VirtualMachine" [ 1384.486750] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1384.487167] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-434a76d9-691a-4656-b840-630fc5255f19 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.493124] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease: (returnval){ [ 1384.493124] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240ad08-db60-d0dd-16b6-9b586deb2043" [ 1384.493124] env[68233]: _type = "HttpNfcLease" [ 1384.493124] env[68233]: } obtained for exporting VM: (result){ [ 1384.493124] env[68233]: value = "vm-559559" [ 1384.493124] env[68233]: _type = "VirtualMachine" [ 1384.493124] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1384.493448] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the lease: (returnval){ [ 1384.493448] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240ad08-db60-d0dd-16b6-9b586deb2043" [ 1384.493448] env[68233]: _type = "HttpNfcLease" [ 1384.493448] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1384.499176] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1384.499176] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240ad08-db60-d0dd-16b6-9b586deb2043" [ 1384.499176] env[68233]: _type = "HttpNfcLease" [ 1384.499176] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1384.527093] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1385.001612] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1385.001612] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240ad08-db60-d0dd-16b6-9b586deb2043" [ 1385.001612] env[68233]: _type = "HttpNfcLease" [ 1385.001612] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1385.002121] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1385.002121] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5240ad08-db60-d0dd-16b6-9b586deb2043" [ 1385.002121] env[68233]: _type = "HttpNfcLease" [ 1385.002121] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1385.002629] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770fc1d0-b66e-4421-b9c4-bcc18ede0666 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.009379] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1385.009505] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk for reading. 
{{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1385.065160] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1385.065348] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.180s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.096600] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d7741166-166f-43f7-853f-19bf4ef6973b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.223337] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1392.224320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecd3877-a469-4f6f-ac7e-f42e80e9537f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.230933] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1392.231112] env[68233]: ERROR oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk due to incomplete transfer. [ 1392.231323] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5a5da453-2110-4a74-af66-0cdbe7edec2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.238400] env[68233]: DEBUG oslo_vmware.rw_handles [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5255faa7-ef69-beb6-61d3-10996e5457e0/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1392.238566] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Uploaded image 6f4a9d68-d159-4c90-9a76-bc47f5355524 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1392.240931] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1392.241166] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-abc79f5f-5538-4ee4-a133-4bf5f9e57065 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.246265] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1392.246265] env[68233]: value = "task-2783428" [ 1392.246265] env[68233]: _type = "Task" [ 1392.246265] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.255190] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783428, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.756956] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783428, 'name': Destroy_Task} progress is 33%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.257379] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783428, 'name': Destroy_Task, 'duration_secs': 0.564193} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.257791] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroyed the VM [ 1393.257791] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1393.258031] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-64c98c16-6c9f-4679-9f79-f798b491f629 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.263924] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1393.263924] env[68233]: value = "task-2783429" [ 1393.263924] env[68233]: _type = "Task" [ 1393.263924] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.270986] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783429, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.773984] env[68233]: DEBUG oslo_vmware.api [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783429, 'name': RemoveSnapshot_Task, 'duration_secs': 0.472814} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.774279] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1393.774511] env[68233]: INFO nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 11.91 seconds to snapshot the instance on the hypervisor. 
[ 1394.320285] env[68233]: DEBUG nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Found 3 images (rotation: 2) {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1394.320518] env[68233]: DEBUG nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Rotating out 1 backups {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1394.320647] env[68233]: DEBUG nova.compute.manager [None req-96a32886-02f9-4a06-8aa1-478d5dae84da tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleting image 907a38f1-81af-467f-87d9-b88813c961e6 {{(pid=68233) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1395.871348] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.871694] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.871867] env[68233]: DEBUG nova.compute.manager [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1395.872826] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298c951c-c17e-498a-9da3-06f8936d1d21 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.879749] env[68233]: DEBUG nova.compute.manager [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68233) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1395.880309] env[68233]: DEBUG nova.objects.instance [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1396.886965] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1396.887393] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f0bcba0-1b10-418e-95aa-bc95d3acd9bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.894126] env[68233]: DEBUG oslo_vmware.api [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1396.894126] env[68233]: value = "task-2783430" [ 1396.894126] env[68233]: _type = "Task" [ 1396.894126] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.901733] env[68233]: DEBUG oslo_vmware.api [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783430, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.403667] env[68233]: DEBUG oslo_vmware.api [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783430, 'name': PowerOffVM_Task, 'duration_secs': 0.19659} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.403932] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1397.404215] env[68233]: DEBUG nova.compute.manager [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1397.404992] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9ff349-512f-41d3-806a-53df78d60b3f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.916434] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1486036e-0ce9-46e7-b0ba-9c9a32578fbd tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.045s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1399.091571] env[68233]: DEBUG nova.compute.manager [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Stashing vm_state: stopped {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1399.611308] 
env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1399.611573] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1400.117063] env[68233]: INFO nova.compute.claims [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1400.622658] env[68233]: INFO nova.compute.resource_tracker [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating resource usage from migration cf2f54f0-56e6-4d31-ac2d-ef6674d6631d [ 1400.670131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d15d9666-7725-45d0-aa39-506e3d74a50c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.677659] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71483b4b-3094-4482-96d3-be2bc2c486a7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.708121] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6276e56-9b2f-4553-a8d0-0339e69fb181 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.715180] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75e088d-464f-41ea-afe6-60e774dd2d4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.727883] env[68233]: DEBUG nova.compute.provider_tree [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.230989] env[68233]: DEBUG nova.scheduler.client.report [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1401.736013] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.124s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1401.736300] env[68233]: INFO nova.compute.manager [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Migrating [ 1402.251422] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.251699] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1402.251890] env[68233]: DEBUG nova.network.neutron [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.945088] env[68233]: DEBUG nova.network.neutron [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.448319] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1404.962165] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7754dcf-3924-44df-a7a5-617f073cf118 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.986046] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1405.491990] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1405.492335] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b311db7-4785-4b75-8831-87442ab5384e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.500425] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1405.500425] env[68233]: value = "task-2783431" [ 1405.500425] env[68233]: _type = "Task" [ 1405.500425] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.507828] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783431, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.010892] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1406.011246] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1406.517230] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1406.517476] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1406.517630] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1406.517821] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1406.518092] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1406.518187] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1406.518409] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1406.518576] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1406.518743] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1406.518906] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1406.519093] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1406.523928] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38ddb5ab-9b78-4d04-aab3-3a296152307b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.539280] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1406.539280] env[68233]: value = "task-2783432" [ 1406.539280] env[68233]: _type = "Task" [ 1406.539280] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.546927] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783432, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.048769] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783432, 'name': ReconfigVM_Task, 'duration_secs': 0.169781} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.049175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1407.556103] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1407.556103] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1407.556403] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1407.556504] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1407.556607] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1407.556751] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1407.556976] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1407.557152] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1407.557321] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1407.557485] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1407.557657] env[68233]: DEBUG nova.virt.hardware [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1407.562775] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1407.563074] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09905fc5-6af1-43f3-ba0b-ff154fe276ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.580993] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1407.580993] env[68233]: value = "task-2783433" [ 1407.580993] env[68233]: _type = "Task" [ 1407.580993] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.588621] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.090637] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783433, 'name': ReconfigVM_Task, 'duration_secs': 0.194443} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.090979] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1408.091791] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c399a0c-56f0-4edc-aa46-9f3d79167440 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.112788] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1408.113021] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57395ed4-fe17-45f7-9bf6-f489f7596f6c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.130155] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1408.130155] env[68233]: value = "task-2783434" [ 1408.130155] env[68233]: _type = "Task" [ 1408.130155] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.136883] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783434, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.641585] env[68233]: DEBUG oslo_vmware.api [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783434, 'name': ReconfigVM_Task, 'duration_secs': 0.322282} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.641924] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Reconfigured VM instance instance-0000007d to attach disk [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620/1b70d077-7610-4524-ad72-ac68413f4620.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1408.642215] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.149641] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbba4daa-dffa-43b3-a288-741578b267b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.168579] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658aef39-aad3-4fe0-bc47-477bfa3f035d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.185485] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.724352] env[68233]: DEBUG nova.network.neutron [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Port 46866dde-e539-4c4c-9367-f7bc868ce8da binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1410.746142] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1410.746528] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1410.746528] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.779925] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.780227] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1411.780310] env[68233]: DEBUG nova.network.neutron [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.481941] env[68233]: DEBUG nova.network.neutron [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.985382] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1413.507889] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0292364f-e997-4c73-97aa-22a3e0d124af {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.526884] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9622e13-fef2-4576-9cfe-2bd3647db0cc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.533543] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1414.040126] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a3f3ad-11c0-4ebc-b8f1-e54319ac2f1d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance '1b70d077-7610-4524-ad72-ac68413f4620' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1415.993581] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1415.993882] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1415.994081] env[68233]: DEBUG nova.compute.manager [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Going to confirm migration 9 {{(pid=68233) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1416.541604] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.541767] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1416.541941] env[68233]: DEBUG nova.network.neutron [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 
1b70d077-7610-4524-ad72-ac68413f4620] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.542137] env[68233]: DEBUG nova.objects.instance [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'info_cache' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1417.774903] env[68233]: DEBUG nova.network.neutron [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.278012] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1418.278348] env[68233]: DEBUG nova.objects.instance [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'migration_context' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1418.781043] env[68233]: DEBUG nova.objects.base [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Object Instance<1b70d077-7610-4524-ad72-ac68413f4620> lazy-loaded attributes: info_cache,migration_context {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1418.782038] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e02558-2648-4d2c-9bf3-f73c39b18ecc {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.801680] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30992cd7-d0de-4137-afb4-4590f6e406f1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.806889] env[68233]: DEBUG oslo_vmware.api [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1418.806889] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52faf8da-16e4-e66c-f68d-183e67aeba30" [ 1418.806889] env[68233]: _type = "Task" [ 1418.806889] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.814638] env[68233]: DEBUG oslo_vmware.api [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52faf8da-16e4-e66c-f68d-183e67aeba30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.260926] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1419.261177] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.316261] env[68233]: DEBUG oslo_vmware.api [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52faf8da-16e4-e66c-f68d-183e67aeba30, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.316527] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1419.316779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1419.764121] env[68233]: DEBUG nova.compute.utils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1419.870378] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ba9d3e-5aa0-4acf-a492-39042a779ee0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.878043] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c194d6f0-feb1-4cbf-abd7-9b29dd0cd4c4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.907953] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26a8b64-f4f1-4a81-827a-d6bf6dc51446 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.914620] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f059dc-e3ce-4c7f-b549-56c1a7cd1baf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.927117] env[68233]: DEBUG nova.compute.provider_tree [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.267470] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1420.431265] env[68233]: DEBUG nova.scheduler.client.report [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1421.333959] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1421.334276] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1421.334493] env[68233]: INFO nova.compute.manager [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Attaching volume 15c987af-00d9-46d7-96c2-2931ecb50228 to /dev/sdb [ 1421.366507] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa35ad8-9537-467d-8c45-a31c265337f9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.374211] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d29fbe1c-4223-450f-bda0-08f33987346d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.387211] env[68233]: DEBUG nova.virt.block_device [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating existing volume attachment record: 1ffaa059-977f-44b9-90b9-d75466624d25 {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1421.443933] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.127s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1421.444220] env[68233]: DEBUG nova.compute.manager [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=68233) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1422.009623] env[68233]: INFO nova.scheduler.client.report [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted allocation for migration cf2f54f0-56e6-4d31-ac2d-ef6674d6631d [ 1422.515669] env[68233]: DEBUG oslo_concurrency.lockutils [None req-04787008-ebe6-4623-9af7-11678b9ec78d tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.522s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1422.854744] env[68233]: DEBUG nova.objects.instance [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.359778] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.359988] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1423.360145] env[68233]: DEBUG nova.network.neutron [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.360326] env[68233]: DEBUG nova.objects.instance [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'info_cache' on Instance uuid 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1423.864101] env[68233]: DEBUG nova.objects.base [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Object Instance<1b70d077-7610-4524-ad72-ac68413f4620> lazy-loaded attributes: flavor,info_cache {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1424.565448] env[68233]: DEBUG nova.network.neutron [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [{"id": "46866dde-e539-4c4c-9367-f7bc868ce8da", "address": "fa:16:3e:0d:1f:c6", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46866dde-e5", "ovs_interfaceid": "46866dde-e539-4c4c-9367-f7bc868ce8da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.068334] env[68233]: DEBUG oslo_concurrency.lockutils [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-1b70d077-7610-4524-ad72-ac68413f4620" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1425.932690] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1425.932928] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559560', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'name': 'volume-15c987af-00d9-46d7-96c2-2931ecb50228', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042', 'attached_at': '', 'detached_at': '', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'serial': '15c987af-00d9-46d7-96c2-2931ecb50228'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1425.933838] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-515fa6f6-119c-400d-8c89-13b82333bbe9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.951140] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5df1ec1-42a0-460c-9a9d-a29414b29632 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.975187] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: 
cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfiguring VM instance instance-0000007e to attach disk [datastore2] volume-15c987af-00d9-46d7-96c2-2931ecb50228/volume-15c987af-00d9-46d7-96c2-2931ecb50228.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1425.975455] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33691945-1760-4cb9-904e-6f6df9d36b20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.993587] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1425.993587] env[68233]: value = "task-2783437" [ 1425.993587] env[68233]: _type = "Task" [ 1425.993587] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.001322] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783437, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.075140] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1426.075625] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-284cc012-c25d-448f-b12b-4b01e415c184 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.082344] env[68233]: DEBUG oslo_vmware.api [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1426.082344] env[68233]: value = "task-2783438" [ 1426.082344] env[68233]: _type = "Task" [ 1426.082344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.090219] env[68233]: DEBUG oslo_vmware.api [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783438, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.503683] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783437, 'name': ReconfigVM_Task, 'duration_secs': 0.338418} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.503970] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfigured VM instance instance-0000007e to attach disk [datastore2] volume-15c987af-00d9-46d7-96c2-2931ecb50228/volume-15c987af-00d9-46d7-96c2-2931ecb50228.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1426.508630] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-772f57df-e8c0-4936-8216-f30c2536d9b0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.524310] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1426.524310] env[68233]: value = "task-2783439" [ 1426.524310] env[68233]: _type = "Task" [ 1426.524310] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.531520] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.590933] env[68233]: DEBUG oslo_vmware.api [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783438, 'name': PowerOnVM_Task, 'duration_secs': 0.367895} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.591224] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.591410] env[68233]: DEBUG nova.compute.manager [None req-75cc6c66-975c-4478-a1cc-0e624700803e tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1426.592189] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91562af-2d7c-4358-85fb-2bd448b0ccc9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.034660] env[68233]: DEBUG oslo_vmware.api [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783439, 'name': ReconfigVM_Task, 'duration_secs': 0.150436} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.034970] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559560', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'name': 'volume-15c987af-00d9-46d7-96c2-2931ecb50228', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042', 'attached_at': '', 'detached_at': '', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'serial': '15c987af-00d9-46d7-96c2-2931ecb50228'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1427.116270] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.116638] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1427.397357] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1427.397575] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1427.397785] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "1b70d077-7610-4524-ad72-ac68413f4620-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1427.397997] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1427.398182] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1427.400364] env[68233]: INFO nova.compute.manager [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Terminating instance [ 1427.903808] env[68233]: DEBUG nova.compute.manager [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1427.904138] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1427.905038] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d596b0fb-08a8-4015-80fa-3894164adc94 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.912931] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1427.913168] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ead25f67-c829-4812-92f8-5a8cc46ef53f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.918906] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1427.918906] env[68233]: value = "task-2783440" [ 1427.918906] env[68233]: _type = "Task" [ 1427.918906] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.926167] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783440, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.071507] env[68233]: DEBUG nova.objects.instance [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1428.428728] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783440, 'name': PowerOffVM_Task, 'duration_secs': 0.19518} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.429116] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1428.429116] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1428.429354] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b23fd1df-448a-4f55-854f-4663cf6079ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.499175] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1428.499411] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1428.499614] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleting the datastore file [datastore2] 1b70d077-7610-4524-ad72-ac68413f4620 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1428.499876] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20ae3e4b-de39-41d6-b70a-0e4a9af5dd0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.505530] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1428.505530] env[68233]: value = "task-2783442" [ 1428.505530] env[68233]: _type = "Task" [ 
1428.505530] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.513221] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783442, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.576526] env[68233]: DEBUG oslo_concurrency.lockutils [None req-6f433974-9893-441e-ae92-fab5eaada1ff tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.242s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1428.753968] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1428.754261] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1429.015129] env[68233]: DEBUG oslo_vmware.api [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783442, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129309} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.015317] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1429.015497] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1429.015673] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1429.015850] env[68233]: INFO nova.compute.manager [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1429.016105] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1429.016297] env[68233]: DEBUG nova.compute.manager [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1429.016393] env[68233]: DEBUG nova.network.neutron [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1429.257973] env[68233]: INFO nova.compute.manager [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Detaching volume 15c987af-00d9-46d7-96c2-2931ecb50228 [ 1429.292094] env[68233]: INFO nova.virt.block_device [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Attempting to driver detach volume 15c987af-00d9-46d7-96c2-2931ecb50228 from mountpoint /dev/sdb [ 1429.292425] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1429.292692] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559560', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'name': 'volume-15c987af-00d9-46d7-96c2-2931ecb50228', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042', 'attached_at': '', 'detached_at': '', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'serial': '15c987af-00d9-46d7-96c2-2931ecb50228'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1429.293621] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883738e7-f807-42d8-a412-24d01ac8f2ef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.317316] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e12c51-12f1-4642-a9a9-50175e8e186f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.324181] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f290d0c9-d6a5-4614-b5b2-0e49bf370f8d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.347684] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465f0b00-fc26-4f98-84df-4b2960545156 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.363643] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] The volume has not been displaced from its original location: [datastore2] volume-15c987af-00d9-46d7-96c2-2931ecb50228/volume-15c987af-00d9-46d7-96c2-2931ecb50228.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1429.368823] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfiguring VM instance instance-0000007e to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1429.369290] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31246946-ba41-40e8-8cc6-9c022b774042 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.387122] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1429.387122] env[68233]: value = "task-2783443" [ 1429.387122] env[68233]: _type = "Task" [ 1429.387122] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.394853] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783443, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.500255] env[68233]: DEBUG nova.compute.manager [req-24991917-78a9-4d51-845c-e8e99bd09523 req-f203ed2d-8f75-4b9d-ae94-97ee23806e10 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Received event network-vif-deleted-46866dde-e539-4c4c-9367-f7bc868ce8da {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1429.500554] env[68233]: INFO nova.compute.manager [req-24991917-78a9-4d51-845c-e8e99bd09523 req-f203ed2d-8f75-4b9d-ae94-97ee23806e10 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Neutron deleted interface 46866dde-e539-4c4c-9367-f7bc868ce8da; detaching it from the instance and deleting it from the info cache [ 1429.500856] env[68233]: DEBUG nova.network.neutron [req-24991917-78a9-4d51-845c-e8e99bd09523 req-f203ed2d-8f75-4b9d-ae94-97ee23806e10 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.618073] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.618073] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1429.897086] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783443, 'name': ReconfigVM_Task, 'duration_secs': 0.203861} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.897385] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Reconfigured VM instance instance-0000007e to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1429.901923] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9555251-c550-41fb-9d18-1dd9907d067e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.915837] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1429.915837] env[68233]: value = "task-2783444" [ 1429.915837] env[68233]: _type = "Task" [ 1429.915837] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.923059] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783444, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.974637] env[68233]: DEBUG nova.network.neutron [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.004673] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b780065d-3b94-4f72-aef8-75ade62098fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.013912] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e02abfb-5bc9-45a2-9056-ad185492b83e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.040323] env[68233]: DEBUG nova.compute.manager [req-24991917-78a9-4d51-845c-e8e99bd09523 req-f203ed2d-8f75-4b9d-ae94-97ee23806e10 service nova] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Detach interface failed, port_id=46866dde-e539-4c4c-9367-f7bc868ce8da, reason: Instance 1b70d077-7610-4524-ad72-ac68413f4620 could not be found. 
{{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1430.115634] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.115836] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.115979] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1430.116153] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1430.426100] env[68233]: DEBUG oslo_vmware.api [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783444, 'name': ReconfigVM_Task, 'duration_secs': 0.137128} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.426100] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559560', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'name': 'volume-15c987af-00d9-46d7-96c2-2931ecb50228', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042', 'attached_at': '', 'detached_at': '', 'volume_id': '15c987af-00d9-46d7-96c2-2931ecb50228', 'serial': '15c987af-00d9-46d7-96c2-2931ecb50228'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1430.478580] env[68233]: INFO nova.compute.manager [-] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Took 1.46 seconds to deallocate network for instance. 
[ 1430.618877] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1430.619265] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1430.619320] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1430.619460] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1430.620320] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fbee7f-8f96-48b6-adb9-648b77c9f606 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.628559] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e48300-6f6e-459d-a404-f709263b3751 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.642011] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec97698e-952a-4e78-9af3-29dd8d6470a5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.648186] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb98108f-a9f4-47f4-878f-a2422a467fc8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.676612] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180801MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1430.676709] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1430.676913] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1430.968135] env[68233]: DEBUG 
nova.objects.instance [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'flavor' on Instance uuid cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1430.984456] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.730689] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1431.730971] env[68233]: WARNING nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 1b70d077-7610-4524-ad72-ac68413f4620 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1431.731052] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1431.731193] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1431.764805] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d374848-49a4-4de1-920e-21d0d7b4d1e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.772234] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9abf804c-f15d-4af1-ad92-3a34e433aa68 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.801166] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136c291d-b16e-431d-9eed-65cceb8d67d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.808074] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d58c9c-102c-4e98-89ae-e070016ddc30 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.820611] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.975231] env[68233]: DEBUG oslo_concurrency.lockutils [None req-eb022e34-60ba-4daf-aa22-dfcb71493a99 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.323269] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1432.830779] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1432.830779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.152s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.830779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.844s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.830779] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.831345] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1432.831345] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances with incomplete migration {{(pid=68233) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11791}} [ 1432.847452] env[68233]: INFO nova.scheduler.client.report [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted allocations for instance 1b70d077-7610-4524-ad72-ac68413f4620 [ 1432.988297] 
env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.988595] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.988794] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1432.988974] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1432.989164] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1432.991214] env[68233]: INFO nova.compute.manager [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Terminating instance [ 1433.355221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-80bf41db-876f-4142-8f93-2c3cf4889415 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "1b70d077-7610-4524-ad72-ac68413f4620" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.957s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.494319] env[68233]: DEBUG nova.compute.manager [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1433.494550] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1433.495998] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f937d9fc-7358-4013-8b01-0c40ce2ea628 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.503581] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1433.503807] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e8897ac-49f5-4d57-b0a4-ea5ed97b55d0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.509736] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1433.509736] env[68233]: value = "task-2783445" [ 1433.509736] env[68233]: _type = "Task" [ 1433.509736] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.517372] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783445, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.019413] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783445, 'name': PowerOffVM_Task, 'duration_secs': 0.159756} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.019708] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1434.019816] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1434.020053] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52b9e0f0-9c56-45c0-be4d-d021bba30dba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.084327] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1434.084542] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1434.084710] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleting the datastore file [datastore2] cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1434.084967] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e60d51ce-9a87-46fe-ad1f-48b4d1358088 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.091075] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for the task: (returnval){ [ 1434.091075] env[68233]: value = "task-2783447" [ 1434.091075] env[68233]: _type = "Task" [ 1434.091075] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1434.098839] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783447, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1434.333382] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1434.455728] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1434.456026] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1434.601883] env[68233]: DEBUG oslo_vmware.api [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Task: {'id': task-2783447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128335} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.602083] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1434.602369] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1434.602603] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1434.602758] env[68233]: INFO nova.compute.manager [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1434.603012] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1434.603204] env[68233]: DEBUG nova.compute.manager [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1434.603301] env[68233]: DEBUG nova.network.neutron [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1434.958109] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Starting instance... {{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1435.074927] env[68233]: DEBUG nova.compute.manager [req-609cef47-65a3-4fb2-acf5-f7aa1cd36603 req-791bf6d5-3333-4803-967b-a51876f123b0 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Received event network-vif-deleted-0db72888-7a72-4956-a3fc-9195096eb3f4 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1435.075332] env[68233]: INFO nova.compute.manager [req-609cef47-65a3-4fb2-acf5-f7aa1cd36603 req-791bf6d5-3333-4803-967b-a51876f123b0 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Neutron deleted interface 0db72888-7a72-4956-a3fc-9195096eb3f4; detaching it from the instance and deleting it from the info cache [ 1435.075332] env[68233]: DEBUG nova.network.neutron [req-609cef47-65a3-4fb2-acf5-f7aa1cd36603 req-791bf6d5-3333-4803-967b-a51876f123b0 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.111283] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1435.480361] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.480707] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.482217] env[68233]: INFO nova.compute.claims [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1435.547271] env[68233]: DEBUG nova.network.neutron [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Updating instance_info_cache 
with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.577717] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1e42514-bcd7-43ca-98f5-84664ab11134 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.587103] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5a02d9-1401-4c84-ae8d-c20eda5866c0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.611931] env[68233]: DEBUG nova.compute.manager [req-609cef47-65a3-4fb2-acf5-f7aa1cd36603 req-791bf6d5-3333-4803-967b-a51876f123b0 service nova] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Detach interface failed, port_id=0db72888-7a72-4956-a3fc-9195096eb3f4, reason: Instance cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1436.049595] env[68233]: INFO nova.compute.manager [-] [instance: cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042] Took 1.45 seconds to deallocate network for instance. [ 1436.116429] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1436.116794] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Cleaning up deleted instances {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11753}} [ 1436.526389] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9f1bee-2803-4b1a-be7f-4410ea9c125c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.534120] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e8a609-864b-4076-9739-e0fdea2ed3cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.564418] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1436.565344] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11f4ff2-4dcf-460a-8fda-b0eed9bf355b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.571974] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9eab7e-3669-4f53-99e8-5264dafd2d6b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.584475] env[68233]: DEBUG nova.compute.provider_tree [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 
51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.623155] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] There are 17 instances to clean {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11762}} [ 1436.623309] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 1b70d077-7610-4524-ad72-ac68413f4620] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1437.088047] env[68233]: DEBUG nova.scheduler.client.report [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1437.126335] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: e905567b-c78e-4c21-b134-78f444e941f4] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1437.593213] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.112s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1437.593742] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Start building networks asynchronously for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1437.597028] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.033s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.597211] env[68233]: DEBUG nova.objects.instance [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lazy-loading 'resources' on Instance uuid cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1437.629547] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 280bc403-3d10-4a29-9507-c548d9cf1d1a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1438.100394] env[68233]: DEBUG nova.compute.utils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1438.105035] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1438.105150] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1438.131909] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 916774a9-bfd3-4931-bc3a-1d50471a1c40] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1438.140153] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb74966-d850-42b8-8db8-85e8cd37b1e5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.148521] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-652c341f-e0cc-4f44-841d-bffdbb2b6630 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.152564] env[68233]: DEBUG nova.policy [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e896b81a32439ebc3db2aacd4d8967', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb98b234b87a4120ad06095426f74ce0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 
'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1438.182994] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdeee4e-44c2-4760-ac16-60d952d63cb5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.190296] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91825b88-8c54-4a59-8d96-15b8155755a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.203151] env[68233]: DEBUG nova.compute.provider_tree [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1438.410029] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Successfully created port: f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.605480] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1438.635547] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: f24af50e-90cd-4398-84d1-a1e1849d01d6] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1438.706663] env[68233]: DEBUG nova.scheduler.client.report [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1439.139432] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: af8d2b01-b0a5-408b-ace3-dd085097b393] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1439.213050] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.234590] env[68233]: INFO nova.scheduler.client.report [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Deleted allocations for instance cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042 [ 1439.616605] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Start spawning the instance on the hypervisor. 
{{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1439.643724] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1439.643966] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.644140] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1439.644330] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.644478] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1439.644625] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1439.644839] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1439.644999] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1439.645179] env[68233]: DEBUG 
nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1439.645349] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1439.645523] env[68233]: DEBUG nova.virt.hardware [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1439.645942] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: ffc57efd-d031-4b09-8255-2498f01e8c78] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1439.648183] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fc4b0e-c934-4edf-a696-775bc56a96cb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.657131] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ea67f4-f35c-4611-b6a5-ed4625b191bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.742425] env[68233]: DEBUG oslo_concurrency.lockutils [None req-65a3cfe2-08c7-43d8-88b9-5672309ce571 tempest-AttachVolumeNegativeTest-320001655 tempest-AttachVolumeNegativeTest-320001655-project-member] Lock "cd2a5667-e00b-4bf8-85a5-ec2fcc4d2042" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.754s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.757467] env[68233]: DEBUG nova.compute.manager [req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Received event network-vif-plugged-f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1439.757467] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1439.757467] env[68233]: DEBUG oslo_concurrency.lockutils [req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1439.758071] env[68233]: DEBUG oslo_concurrency.lockutils 
[req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1439.758071] env[68233]: DEBUG nova.compute.manager [req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] No waiting events found dispatching network-vif-plugged-f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1439.758071] env[68233]: WARNING nova.compute.manager [req-fa2a6c0b-14a5-4368-94e6-91ab383bcd0c req-a679b6b3-644b-4e4c-af9c-0111f7bcab6c service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Received unexpected event network-vif-plugged-f7cab745-6917-4d44-9f7c-057846281627 for instance with vm_state building and task_state spawning. [ 1439.839948] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Successfully updated port: f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.152272] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 81e0800d-7731-433c-9238-b4aa07a4ddda] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1440.342577] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.342730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1440.342888] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.655979] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: aadc7dbe-456c-4bf3-b26d-bac672459fb9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1440.879880] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1440.996061] env[68233]: DEBUG nova.network.neutron [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.159703] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 863e15c6-caa4-47aa-902a-7be2c9538687] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1441.498797] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1441.499137] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Instance network_info: |[{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1441.499567] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:23:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7cab745-6917-4d44-9f7c-057846281627', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1441.507400] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1441.507400] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1441.507559] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de2a2ebe-ca55-4d37-9c48-f9674f867307 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.526855] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1441.526855] env[68233]: value = "task-2783449" [ 1441.526855] env[68233]: _type = "Task" [ 1441.526855] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.534458] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783449, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.663328] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 151b16bc-6b78-4527-8571-b07b5ad7db7b] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1441.784685] env[68233]: DEBUG nova.compute.manager [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Received event network-changed-f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1441.784803] env[68233]: DEBUG nova.compute.manager [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Refreshing instance network info cache due to event network-changed-f7cab745-6917-4d44-9f7c-057846281627. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1441.785010] env[68233]: DEBUG oslo_concurrency.lockutils [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.785210] env[68233]: DEBUG oslo_concurrency.lockutils [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1441.785392] env[68233]: DEBUG nova.network.neutron [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Refreshing network info cache for port f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.037279] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783449, 'name': CreateVM_Task, 'duration_secs': 0.306879} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.037475] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1442.038203] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.038338] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1442.038653] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1442.038902] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-535b1454-4a1e-4a81-8da5-365794ebdd9e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.043172] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1442.043172] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529827cf-1558-70a9-ba9c-275fe4ff5a1f" [ 1442.043172] env[68233]: _type = "Task" [ 1442.043172] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.050410] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529827cf-1558-70a9-ba9c-275fe4ff5a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.166547] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 171da032-9aeb-4972-8ec7-4181e2667ac0] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1442.506458] env[68233]: DEBUG nova.network.neutron [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updated VIF entry in instance network info cache for port f7cab745-6917-4d44-9f7c-057846281627. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.506851] env[68233]: DEBUG nova.network.neutron [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.553985] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]529827cf-1558-70a9-ba9c-275fe4ff5a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.009735} completed successfully. 
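The CreateVM_Task and SearchDatastore_Task entries above all follow oslo.vmware's invoke-then-poll pattern: a vSphere task is started through the API session, then polled until it reports completion. A minimal sketch of that pattern with the real oslo.vmware session API; the host, credentials and managed-object arguments are placeholders, not values from this deployment:

```python
# Sketch of the invoke-then-poll pattern behind the *_Task entries above.
# VMwareAPISession, invoke_api and wait_for_task are real oslo.vmware calls;
# every argument value here is a placeholder.
from oslo_vmware import api


def create_vm_example(host, user, password, vm_folder_ref, config_spec,
                      res_pool_ref):
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)
    # Start the vSphere task (Folder.CreateVM_Task, as in the log) ...
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # ... then block on it; the polling is what produces the
    # "Waiting for the task" / "progress is N%" / "completed successfully"
    # sequence seen above.
    return session.wait_for_task(task)
```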
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.555491] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1442.555491] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1442.555491] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.555491] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1442.555491] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1442.555768] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5228217-f54a-40c6-977d-bd87ebe0ab22 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.564046] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1442.564166] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Folder [datastore2] devstack-image-cache_base created. 
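Locations such as "[datastore2] devstack-image-cache_base/....vmdk" in the entries above use vSphere's "[datastore] relative/path" notation. oslo.vmware ships a small DatastorePath helper for building and parsing such strings; a short illustration with a placeholder file name, assuming the helper's parse/rel_path interface (which Nova's ds_util wraps):

```python
# Building and parsing vSphere "[datastore] relative/path" strings with the
# oslo.vmware DatastorePath helper; the image file name is a placeholder.
from oslo_vmware.objects import datastore as ds_obj

path = ds_obj.DatastorePath('datastore2', 'devstack-image-cache_base',
                            'example-image.vmdk')
print(str(path))         # [datastore2] devstack-image-cache_base/example-image.vmdk

parsed = ds_obj.DatastorePath.parse(str(path))
print(parsed.datastore)  # datastore2
print(parsed.rel_path)   # devstack-image-cache_base/example-image.vmdk
```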
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1442.564787] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-887c3fad-e028-44da-a7ca-85415e949cfb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.570764] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1442.570764] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52effb43-6d53-de47-9ebf-66cf595b5a9c" [ 1442.570764] env[68233]: _type = "Task" [ 1442.570764] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.578450] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52effb43-6d53-de47-9ebf-66cf595b5a9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.670223] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 5038002c-884f-4f75-a1fe-aa84220c9ea6] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1443.010726] env[68233]: DEBUG oslo_concurrency.lockutils [req-10a1d704-20ff-4a63-82ba-60d3929f8880 req-b9348150-28f8-43c5-b796-acd34f67fece service nova] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.080668] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52effb43-6d53-de47-9ebf-66cf595b5a9c, 'name': SearchDatastore_Task, 'duration_secs': 0.00842} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.081548] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dca1a8db-5e0c-4a0a-86a5-ad652f91e58b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.086450] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1443.086450] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa2993-f761-7e8a-c73d-a520c68e65ea" [ 1443.086450] env[68233]: _type = "Task" [ 1443.086450] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.093733] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa2993-f761-7e8a-c73d-a520c68e65ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.173590] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 9f862347-508b-4c8a-a338-97972b0c0b0b] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1443.597036] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52fa2993-f761-7e8a-c73d-a520c68e65ea, 'name': SearchDatastore_Task, 'duration_secs': 0.009766} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.597336] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.598036] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1443.598036] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05c26d97-ca69-48ec-9c51-7691f683d469 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.604041] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1443.604041] env[68233]: value = "task-2783451" [ 1443.604041] env[68233]: _type = "Task" [ 1443.604041] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.611260] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783451, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.677051] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 03bdac8b-86e0-4ad3-9f2c-7a08db5d05f9] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1444.114124] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783451, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421915} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.114405] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1444.114622] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1444.114866] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1589140b-90a3-4ded-b571-9c4b6436d8ce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.121377] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1444.121377] env[68233]: value = "task-2783452" [ 1444.121377] env[68233]: _type = "Task" [ 1444.121377] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.129823] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783452, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.180579] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: c2d04b37-3eae-46cb-a227-b62d36c62a6a] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1444.631513] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783452, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059459} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.632200] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1444.632636] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0768df4c-8109-4842-a98a-095c7bec08b5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.655301] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1444.655566] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b35ce34-419e-46fb-ab57-897bb927788c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.676762] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1444.676762] env[68233]: value = "task-2783453" [ 1444.676762] env[68233]: _type = "Task" [ 1444.676762] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.684757] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: dd59cab5-3f9a-42cc-93f1-75cea940acdd] Instance has had 0 of 5 cleanup attempts {{(pid=68233) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11766}} [ 1444.686472] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783453, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.188579] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783453, 'name': ReconfigVM_Task, 'duration_secs': 0.291363} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.189027] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1445.189756] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2be78514-1e5e-43bd-bf37-b01dd4af3c3e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.196373] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1445.196373] env[68233]: value = "task-2783454" [ 1445.196373] env[68233]: _type = "Task" [ 1445.196373] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.203835] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783454, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.706906] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783454, 'name': Rename_Task, 'duration_secs': 0.1541} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.707200] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.707440] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc84af85-eca6-4091-aef8-7517f73e06c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.713231] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1445.713231] env[68233]: value = "task-2783455" [ 1445.713231] env[68233]: _type = "Task" [ 1445.713231] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.720349] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783455, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.223927] env[68233]: DEBUG oslo_vmware.api [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783455, 'name': PowerOnVM_Task, 'duration_secs': 0.458733} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.224320] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1446.224320] env[68233]: INFO nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Took 6.61 seconds to spawn the instance on the hypervisor. [ 1446.224502] env[68233]: DEBUG nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1446.225283] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b510f43-dbd9-4849-8e0c-e8a4f94383b7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.744328] env[68233]: INFO nova.compute.manager [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Took 11.28 seconds to build instance. [ 1447.247013] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d96dd851-c4b6-4cd9-8f8e-3c80a7712066 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.791s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1447.434312] env[68233]: DEBUG nova.compute.manager [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Received event network-changed-f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1447.434423] env[68233]: DEBUG nova.compute.manager [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Refreshing instance network info cache due to event network-changed-f7cab745-6917-4d44-9f7c-057846281627. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1447.435501] env[68233]: DEBUG oslo_concurrency.lockutils [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.435501] env[68233]: DEBUG oslo_concurrency.lockutils [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1447.435501] env[68233]: DEBUG nova.network.neutron [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Refreshing network info cache for port f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.362915] env[68233]: DEBUG nova.network.neutron [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updated VIF entry in instance network info cache for port f7cab745-6917-4d44-9f7c-057846281627. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1448.363292] env[68233]: DEBUG nova.network.neutron [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.866162] env[68233]: DEBUG oslo_concurrency.lockutils [req-ab0a4ee2-2113-42e7-a781-372a3c46e68d req-fa06a189-52ec-4bc5-bf2f-99c6cf713617 service nova] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1485.173238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.173657] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.677627] env[68233]: DEBUG nova.compute.utils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1486.181165] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.240018] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1487.240421] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.240557] env[68233]: INFO nova.compute.manager [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Attaching volume a7cd6ead-11d4-4191-8f05-2f18b8a488ec to /dev/sdb [ 1487.294132] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef30ae3-76fe-4b99-ae93-d4cefaab5835 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.301504] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6583077e-bdaf-4315-80be-2fd35952fce7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.314350] env[68233]: DEBUG nova.virt.block_device [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 
17f29ee1-39e8-4893-9fb6-64694448c60f] Updating existing volume attachment record: b3a4c22a-5861-4623-9ccf-fac9418069fc {{(pid=68233) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1491.855699] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Volume attach. Driver type: vmdk {{(pid=68233) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1491.855942] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559563', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'name': 'volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '17f29ee1-39e8-4893-9fb6-64694448c60f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'serial': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1491.856814] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bf83ef-7895-4704-8f72-63d345845678 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.873438] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5725be-4adb-4e55-80fa-e4020f0620ca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.898782] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec/volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1491.899050] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c3c44dd-7a3b-46bf-b7e2-78709de55800 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.916572] env[68233]: DEBUG oslo_vmware.api [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1491.916572] env[68233]: value = "task-2783461" [ 1491.916572] env[68233]: _type = "Task" [ 1491.916572] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.425979] env[68233]: DEBUG oslo_vmware.api [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783461, 'name': ReconfigVM_Task, 'duration_secs': 0.355417} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.426274] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to attach disk [datastore2] volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec/volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1492.430797] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7921b08-9d1e-482d-9432-c255b52cb87e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.444774] env[68233]: DEBUG oslo_vmware.api [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1492.444774] env[68233]: value = "task-2783462" [ 1492.444774] env[68233]: _type = "Task" [ 1492.444774] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.452242] env[68233]: DEBUG oslo_vmware.api [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783462, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.954694] env[68233]: DEBUG oslo_vmware.api [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783462, 'name': ReconfigVM_Task, 'duration_secs': 0.130447} completed successfully. 
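The attach flow above is driven by the Cinder connection_info dictionary printed in the _attach_volume_vmdk entries; for the 'vmdk' driver type the fields of interest are the volume's backing managed object, the volume UUID and the access mode. A trimmed copy of the logged structure, kept only to show which keys are consumed:

```python
# Trimmed copy of the 'vmdk' connection_info from the log above; only the
# fields relevant to the attach are kept, values are as logged.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-559563",   # managed object id of the volume's backing VM
        "volume_id": "a7cd6ead-11d4-4191-8f05-2f18b8a488ec",
        "name": "volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec",
        "access_mode": "rw",
        "encrypted": False,
    },
    "serial": "a7cd6ead-11d4-4191-8f05-2f18b8a488ec",
}

data = connection_info["data"]
print(connection_info["driver_volume_type"], data["volume_id"],
      data["access_mode"])
# -> vmdk a7cd6ead-11d4-4191-8f05-2f18b8a488ec rw
```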
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.955015] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559563', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'name': 'volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '17f29ee1-39e8-4893-9fb6-64694448c60f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'serial': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec'} {{(pid=68233) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1493.990160] env[68233]: DEBUG nova.objects.instance [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'flavor' on Instance uuid 17f29ee1-39e8-4893-9fb6-64694448c60f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1494.496792] env[68233]: DEBUG oslo_concurrency.lockutils [None req-60f9abc4-b76c-4a08-af17-432a7fc55370 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.256s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1495.188389] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.188699] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.188750] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.188893] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.189052] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.189203] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.189345] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1495.189531] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1495.415650] env[68233]: DEBUG nova.compute.manager [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Stashing vm_state: active {{(pid=68233) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1495.692641] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1495.692897] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1495.693091] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1495.693252] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1495.694163] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c15302a-c3b6-456e-98ed-743c602f7af3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.702794] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6047109-03fa-474f-a522-e2601f3b20f3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.717835] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e9c2c0-9911-4c5c-80f7-b053e94aa4df {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.724011] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01163ea2-52db-405c-91cf-39c177c8c5c6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.751843] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180918MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1495.751987] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1495.752199] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1495.934451] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1496.758766] env[68233]: INFO nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating resource usage from migration 04b025f4-b4f8-4670-abe7-81079e7b0aa0 [ 1496.773802] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Migration 04b025f4-b4f8-4670-abe7-81079e7b0aa0 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1496.773949] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance 17f29ee1-39e8-4893-9fb6-64694448c60f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1496.774134] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1496.774284] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1496.807509] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367d984a-2554-4fe0-8a47-3cffaf3bbe84 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.815416] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad62a9d-064e-4f56-96aa-ee6a54a836d5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.845096] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c790cde-de13-42ef-89c4-bae5ce33121e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.851558] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4758e9e9-2b01-4a47-bcc2-ddeec0753fef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.863963] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1497.367247] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1497.872191] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1497.872603] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.120s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1497.872677] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.938s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1498.378228] env[68233]: INFO nova.compute.claims [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1498.885011] env[68233]: INFO nova.compute.resource_tracker [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating resource usage from migration 04b025f4-b4f8-4670-abe7-81079e7b0aa0 [ 1498.921258] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d3aed2-1a8d-4708-acd2-a9278b757f3c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.928527] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cde427-cef4-447a-b4b2-386f88d6cb41 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.958618] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93b3c81-de54-4561-a559-d2ecbe2366e3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.965144] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad78402-9dd2-49c8-97e3-908b763ad920 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.977591] env[68233]: DEBUG nova.compute.provider_tree [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1499.480583] env[68233]: DEBUG nova.scheduler.client.report [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1499.796853] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1499.797101] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1499.985856] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.113s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1499.986240] env[68233]: INFO nova.compute.manager [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Migrating [ 1500.500763] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.500987] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1500.501200] env[68233]: DEBUG nova.network.neutron [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1501.224060] env[68233]: DEBUG nova.network.neutron [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.726438] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1503.240619] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29427683-4768-4945-94e9-f0aa64a879c1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.261677] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 0 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1503.767387] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.767689] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-907dc645-8c1e-466c-b918-d18e57a125fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.774466] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1503.774466] env[68233]: value = "task-2783463" [ 1503.774466] env[68233]: _type = "Task" [ 1503.774466] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.782217] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783463, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.284125] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783463, 'name': PowerOffVM_Task, 'duration_secs': 0.195314} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.284484] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1504.284557] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 17 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1504.791237] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1504.791607] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1504.791817] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1504.791893] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1504.792022] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1504.792180] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1504.792442] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1504.792646] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1504.792845] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1504.793023] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1504.793205] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1504.798303] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b242c48-7bd0-4fa5-b6b2-1dbc348c9e2d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.813843] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1504.813843] env[68233]: value = "task-2783464" [ 1504.813843] env[68233]: _type = "Task" [ 1504.813843] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.821848] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783464, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.324114] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783464, 'name': ReconfigVM_Task, 'duration_secs': 0.180288} completed successfully. 
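The "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" lines above describe Nova's CPU-topology search. The sketch below re-states that search illustratively (enumerate the sockets*cores*threads splits of the vCPU count that respect the limits); it is not the actual nova/virt/hardware.py code:

```python
# Illustrative sketch: yield every (sockets, cores, threads) split whose
# product equals the vCPU count and stays within the limits (65536 each,
# per the "limits were sockets=65536, cores=65536, threads=65536" line).
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                yield sockets, cores, threads


# For the 1-vCPU flavors in this run the only split is 1:1:1, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```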
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.324473] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 33 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1505.831222] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1505.831490] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1505.831620] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1505.831804] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1505.831955] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1505.832119] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1505.832329] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1505.832497] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1505.832667] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1505.832831] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1505.833012] env[68233]: DEBUG nova.virt.hardware [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1505.838219] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1505.838495] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c940c525-2e36-44c6-adf9-b7d651aced0a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.856657] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1505.856657] env[68233]: value = "task-2783465" [ 1505.856657] env[68233]: _type = "Task" [ 1505.856657] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.863964] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783465, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.367066] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783465, 'name': ReconfigVM_Task, 'duration_secs': 0.266207} completed successfully. 
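"Reconfiguring VM instance instance-0000007f to detach disk 2000" above is a vCenter ReconfigVM_Task carrying a single 'remove' device change. A hedged sketch of that spec construction through the session's suds factory; the function and its arguments are placeholders, not Nova's volumeops:

```python
# Hedged sketch of the detach step: a VirtualDeviceConfigSpec with operation
# 'remove' (and no fileOperation, so the backing VMDK stays on the datastore),
# submitted via ReconfigVM_Task and waited on like any other vCenter task.
def detach_disk(session, vm_ref, disk_device):
    factory = session.vim.client.factory
    device_spec = factory.create('ns0:VirtualDeviceConfigSpec')
    device_spec.operation = 'remove'
    device_spec.device = disk_device   # the VirtualDisk previously looked up on the VM
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_spec]
    task_ref = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
    session.wait_for_task(task_ref)
```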
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.367413] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1506.368017] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984bc64f-3abe-4ad9-8c8e-90df304f6f10 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.393410] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1506.394200] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7ab99ded-0522-49b0-9ae8-a84d2e89b7e0 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.413563] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1506.413563] env[68233]: value = "task-2783466" [ 1506.413563] env[68233]: _type = "Task" [ 1506.413563] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.421183] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.922861] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783466, 'name': ReconfigVM_Task, 'duration_secs': 0.326755} completed successfully. 
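The matching "attach disk [datastore2] ... with type thin" reconfigure above adds the existing VMDK back through an 'add' device change whose flat-file backing requests thin provisioning. A hedged counterpart to the detach sketch; the controller/unit wiring, device key and argument names are simplified placeholders:

```python
# Hedged sketch of the attach step: point a thin-provisioned flat-file backing
# at the existing VMDK and add it as a new VirtualDisk device.
def attach_existing_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number):
    factory = session.vim.client.factory
    backing = factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True
    backing.fileName = vmdk_path       # e.g. a '[datastore2] <dir>/<name>.vmdk' path
    disk = factory.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100                    # negative placeholder; vCenter assigns the real key
    device_spec = factory.create('ns0:VirtualDeviceConfigSpec')
    device_spec.operation = 'add'
    device_spec.device = disk
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_spec]
    session.wait_for_task(
        session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref, spec=config_spec))
```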
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.923145] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1506.923424] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 50 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1507.430411] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4041836-eccc-4cb2-a142-dce86e51639e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.451802] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38739b75-41f1-4a71-a66b-837dbfc83396 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.471447] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 67 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1509.107955] env[68233]: DEBUG nova.network.neutron [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Port f7cab745-6917-4d44-9f7c-057846281627 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1510.130050] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1510.130454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1510.130454] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1511.165577] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.165869] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1511.166067] env[68233]: DEBUG nova.network.neutron [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1511.858412] env[68233]: DEBUG nova.network.neutron [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.361226] env[68233]: DEBUG oslo_concurrency.lockutils [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1512.870820] env[68233]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e062b6d-54e8-4335-ae68-cfe1e512965e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.878606] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba96a897-8b0d-4fd8-83e2-cd13d0bc2e20 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.974678] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db67fc80-fffd-41b1-b5a2-24efdd890f18 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.996310] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b3589f-f49c-406f-abf6-98aea36d8408 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.003126] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 83 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1514.509202] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1514.509510] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9b100c8c-e54e-433d-b22e-34543e87bb39 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.516727] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1514.516727] env[68233]: value = "task-2783467" [ 1514.516727] env[68233]: _type = "Task" [ 1514.516727] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.525280] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.026263] env[68233]: DEBUG oslo_vmware.api [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783467, 'name': PowerOnVM_Task, 'duration_secs': 0.39648} completed successfully. 
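The instance_info_cache entries logged repeatedly in this section for port f7cab745-6917-4d44-9f7c-057846281627 are plain list/dict structures. Purely as an illustration (this is not Nova's network model API), here is a trimmed copy of one such VIF entry and a helper pulling out its fixed and floating addresses:

```python
# Illustrative only: the VIF dict below is trimmed to the fields used here,
# with the values taken from the "Updating instance_info_cache with
# network_info" records above.
vif = {
    "id": "f7cab745-6917-4d44-9f7c-057846281627",
    "address": "fa:16:3e:be:23:a2",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.4",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.183", "type": "floating"}],
            }],
        }],
    },
}


def addresses(vif):
    """Yield (fixed address, [floating addresses]) pairs from one VIF entry."""
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            yield ip["address"], [f["address"] for f in ip.get("floating_ips", [])]


print(list(addresses(vif)))  # [('192.168.128.4', ['10.180.180.183'])]
```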
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.026673] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1515.026673] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-a33122c5-10dd-4d1e-9ca0-1a83f7e33ee8 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance '17f29ee1-39e8-4893-9fb6-64694448c60f' progress to 100 {{(pid=68233) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1516.981269] env[68233]: DEBUG nova.network.neutron [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Port f7cab745-6917-4d44-9f7c-057846281627 binding to destination host cpu-1 is already ACTIVE {{(pid=68233) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1516.981547] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1516.981692] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1516.981854] env[68233]: DEBUG nova.network.neutron [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1517.734330] env[68233]: DEBUG nova.network.neutron [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.237580] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1518.741676] env[68233]: DEBUG nova.compute.manager [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68233) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1519.837242] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1519.837557] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1520.340802] env[68233]: DEBUG nova.objects.instance [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'migration_context' on Instance uuid 17f29ee1-39e8-4893-9fb6-64694448c60f {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1520.886250] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0347472f-5827-4538-81d4-a1cbdb18f970 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.894069] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70c6301-72a8-42e7-b7ec-83ca7e7d1fe8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.923530] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b09af4-6343-4521-8658-10318f992e8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.931394] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d9ec7b27-6c7a-40e6-b773-fa4c230f8cc4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.944632] env[68233]: DEBUG nova.compute.provider_tree [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.447596] env[68233]: DEBUG nova.scheduler.client.report [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1522.461349] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.624s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1523.996302] env[68233]: INFO nova.compute.manager [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Swapping old allocation on dict_keys(['51aa13e7-0977-4031-b209-4ae90c83752c']) held by migration 04b025f4-b4f8-4670-abe7-81079e7b0aa0 for instance [ 1524.017974] env[68233]: DEBUG nova.scheduler.client.report [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Overwriting current allocation {'allocations': {'51aa13e7-0977-4031-b209-4ae90c83752c': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 186}}, 'project_id': 'eb98b234b87a4120ad06095426f74ce0', 'user_id': '18e896b81a32439ebc3db2aacd4d8967', 'consumer_generation': 1} on consumer 17f29ee1-39e8-4893-9fb6-64694448c60f {{(pid=68233) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1524.097904] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.098147] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1524.098335] 
env[68233]: DEBUG nova.network.neutron [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1524.800829] env[68233]: DEBUG nova.network.neutron [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [{"id": "f7cab745-6917-4d44-9f7c-057846281627", "address": "fa:16:3e:be:23:a2", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7cab745-69", "ovs_interfaceid": "f7cab745-6917-4d44-9f7c-057846281627", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1525.303503] env[68233]: DEBUG oslo_concurrency.lockutils [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-17f29ee1-39e8-4893-9fb6-64694448c60f" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1525.304536] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-775b78fa-e739-4f4f-94c6-c583a9a15c0a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.311446] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc8f50a-8395-482f-a1fb-ddebbda665fb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.388177] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.388519] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf009cce-042c-48a0-b0c9-75f8d3a24d49 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.396268] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1526.396268] env[68233]: value = "task-2783468" [ 1526.396268] env[68233]: _type = "Task" [ 1526.396268] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.403929] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783468, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.905236] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783468, 'name': PowerOffVM_Task, 'duration_secs': 0.181822} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.905499] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1526.906137] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1526.906362] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.906515] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1526.906698] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.906844] env[68233]: DEBUG 
nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1526.906991] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1526.907208] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1526.907365] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1526.907529] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1526.907688] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1526.907885] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1526.912730] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6106713a-a615-44fd-8a66-525d10bf36fe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.927744] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1526.927744] env[68233]: value = "task-2783469" [ 1526.927744] env[68233]: _type = "Task" [ 1526.927744] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.936196] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783469, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.437456] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783469, 'name': ReconfigVM_Task, 'duration_secs': 0.141963} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.438881] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69970a30-232b-4962-bab9-296a7b9e995f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.459655] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1527.459655] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1527.459655] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1527.459655] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1527.459945] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1527.459945] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1527.460147] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1527.460348] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1527.460532] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1527.460695] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1527.460860] env[68233]: DEBUG nova.virt.hardware [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1527.461626] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376db7c6-ad14-4dad-88e5-b492a6f9f6fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.466667] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1527.466667] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524dea3d-9568-2d9e-c2ae-1fb005cddf3e" [ 1527.466667] env[68233]: _type = "Task" [ 1527.466667] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.474103] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524dea3d-9568-2d9e-c2ae-1fb005cddf3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.976400] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]524dea3d-9568-2d9e-c2ae-1fb005cddf3e, 'name': SearchDatastore_Task, 'duration_secs': 0.008374} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.981642] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1527.981907] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0efe956f-933b-4195-bc40-21fcf3dd5cb1 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.999432] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1527.999432] env[68233]: value = "task-2783470" [ 1527.999432] env[68233]: _type = "Task" [ 1527.999432] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.006839] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783470, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.509615] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783470, 'name': ReconfigVM_Task, 'duration_secs': 0.213993} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.510025] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to detach disk 2000 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1528.510630] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d862c2bf-f0c4-4a1c-9f16-e8144082d223 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.534606] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1528.534850] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6e410b4-f93a-431a-9f69-221a292421cf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.551976] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1528.551976] env[68233]: value = "task-2783471" [ 1528.551976] env[68233]: _type = "Task" [ 1528.551976] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.559429] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783471, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.062107] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783471, 'name': ReconfigVM_Task, 'duration_secs': 0.266109} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.062368] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to attach disk [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f/17f29ee1-39e8-4893-9fb6-64694448c60f.vmdk or device None with type thin {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.063193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56790ea4-75ba-433b-aecd-9d9a876293a3 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.083473] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e2b05e-2aa7-4ca9-883a-0449a517680f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.104540] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3513bcd3-8885-493f-b28a-9ef85ff84dcc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.124674] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4976e24d-4e67-4b78-8746-853e14a7279a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.131622] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.131846] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd97e073-206a-4f5f-a275-14c253a73454 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.138344] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1529.138344] env[68233]: value = "task-2783472" [ 1529.138344] env[68233]: _type = "Task" [ 1529.138344] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.145465] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783472, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.647286] env[68233]: DEBUG oslo_vmware.api [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783472, 'name': PowerOnVM_Task, 'duration_secs': 0.359429} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.647641] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.686353] env[68233]: INFO nova.compute.manager [None req-f88b8812-d169-4f3c-ab15-e0db9cc09db1 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance to original state: 'active' [ 1532.617630] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1532.618153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1532.618153] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1532.618335] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1532.618504] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1532.620580] env[68233]: INFO nova.compute.manager [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Terminating instance [ 1533.124667] env[68233]: DEBUG nova.compute.manager [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 
17f29ee1-39e8-4893-9fb6-64694448c60f] Start destroying the instance on the hypervisor. {{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1533.124751] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1533.125054] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ade7d9e0-3ea8-4137-8f4c-1a2b4f7360e4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.132056] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1533.132056] env[68233]: value = "task-2783473" [ 1533.132056] env[68233]: _type = "Task" [ 1533.132056] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.139816] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783473, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.642362] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783473, 'name': PowerOffVM_Task, 'duration_secs': 0.180571} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.642755] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1533.642812] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Volume detach. 
Driver type: vmdk {{(pid=68233) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1533.642995] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559563', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'name': 'volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '17f29ee1-39e8-4893-9fb6-64694448c60f', 'attached_at': '2025-03-06T04:05:20.000000', 'detached_at': '', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'serial': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1533.643748] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93001ae5-3170-42f5-88be-797b43fc50ec {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.663837] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c1cb91-40cf-4a54-9441-97d1226b2594 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.669903] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a74e43-82ad-4177-95ef-726884cd883e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.688867] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2a0d26-3026-4be7-a3bd-8bddaa932262 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.702409] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] The volume has not been displaced from its original location: [datastore2] volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec/volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec.vmdk. No consolidation needed. 
{{(pid=68233) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1533.707386] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfiguring VM instance instance-0000007f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1533.707621] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b81fc36c-0d81-4845-a71b-7cabbb86b658 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.724429] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1533.724429] env[68233]: value = "task-2783474" [ 1533.724429] env[68233]: _type = "Task" [ 1533.724429] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.731353] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.234237] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783474, 'name': ReconfigVM_Task, 'duration_secs': 0.187412} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.234507] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Reconfigured VM instance instance-0000007f to detach disk 2001 {{(pid=68233) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1534.238997] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-431d00ed-3830-4fdd-8c18-b9a811cccb9f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.253375] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1534.253375] env[68233]: value = "task-2783475" [ 1534.253375] env[68233]: _type = "Task" [ 1534.253375] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.260572] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783475, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.764464] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783475, 'name': ReconfigVM_Task, 'duration_secs': 0.129356} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.764857] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-559563', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'name': 'volume-a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '17f29ee1-39e8-4893-9fb6-64694448c60f', 'attached_at': '2025-03-06T04:05:20.000000', 'detached_at': '', 'volume_id': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec', 'serial': 'a7cd6ead-11d4-4191-8f05-2f18b8a488ec'} {{(pid=68233) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1534.764952] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1534.765697] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e67fa7-90ec-4313-9b87-a7863826c36f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.771830] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1534.772047] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4ffa074-13b7-4ae7-b22a-4395ca0d1e4d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.837592] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1534.837805] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1534.838043] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Deleting the datastore file [datastore2] 17f29ee1-39e8-4893-9fb6-64694448c60f {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1534.838309] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a8b1e6a-2752-4548-93df-f693782aa442 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.867680] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1534.867680] env[68233]: value = "task-2783477" [ 1534.867680] env[68233]: _type = "Task" [ 1534.867680] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.867680] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.353866] env[68233]: DEBUG oslo_vmware.api [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135603} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.354163] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1535.354390] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1535.354598] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1535.354805] env[68233]: INFO nova.compute.manager [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1535.355065] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1535.355258] env[68233]: DEBUG nova.compute.manager [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1535.355449] env[68233]: DEBUG nova.network.neutron [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1535.861240] env[68233]: DEBUG nova.compute.manager [req-4ea0b123-8980-405a-ad4b-08b249d025f5 req-0f0cc993-299c-41ce-95f0-aa7504c7c7f0 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Received event network-vif-deleted-f7cab745-6917-4d44-9f7c-057846281627 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1535.861240] env[68233]: INFO nova.compute.manager [req-4ea0b123-8980-405a-ad4b-08b249d025f5 req-0f0cc993-299c-41ce-95f0-aa7504c7c7f0 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Neutron deleted interface f7cab745-6917-4d44-9f7c-057846281627; detaching it from the instance and deleting it from the info cache [ 1535.861240] env[68233]: DEBUG nova.network.neutron [req-4ea0b123-8980-405a-ad4b-08b249d025f5 req-0f0cc993-299c-41ce-95f0-aa7504c7c7f0 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.294728] env[68233]: DEBUG nova.network.neutron [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.364203] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30c536b8-556e-4244-88e1-8d4627e5c9f7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.373625] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193bf69c-bc5c-4db5-9c70-23b3ea4cf69a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.396887] env[68233]: DEBUG nova.compute.manager [req-4ea0b123-8980-405a-ad4b-08b249d025f5 req-0f0cc993-299c-41ce-95f0-aa7504c7c7f0 service nova] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Detach interface failed, port_id=f7cab745-6917-4d44-9f7c-057846281627, reason: Instance 17f29ee1-39e8-4893-9fb6-64694448c60f could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}} [ 1536.798115] env[68233]: INFO nova.compute.manager [-] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Took 1.44 seconds to deallocate network for instance. [ 1537.339441] env[68233]: INFO nova.compute.manager [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: 17f29ee1-39e8-4893-9fb6-64694448c60f] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1537.846016] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1537.846334] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1537.846525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1537.867971] env[68233]: INFO nova.scheduler.client.report [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted allocations for instance 17f29ee1-39e8-4893-9fb6-64694448c60f [ 1538.375269] env[68233]: DEBUG oslo_concurrency.lockutils [None req-821a2fab-ae6d-4436-9e5d-1865f2860473 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "17f29ee1-39e8-4893-9fb6-64694448c60f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.757s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1539.801169] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1539.801439] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1540.305056] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Starting instance... 
{{(pid=68233) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1540.824885] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1540.825179] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1540.827086] env[68233]: INFO nova.compute.claims [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1541.862162] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0f2213-9d83-478e-9f18-d053aaf0e27f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.869427] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad2dcb0-7230-4641-8678-d1eee745e59b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.898198] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d22e7f-7f8c-4828-9401-0bc5cd904f5d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.904634] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f48b1b-6942-4b4f-af2b-8044029e4fba {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.917988] env[68233]: DEBUG nova.compute.provider_tree [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1542.420876] env[68233]: DEBUG nova.scheduler.client.report [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1542.925991] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.101s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1542.926563] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Start building networks asynchronously for instance. {{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1543.432904] env[68233]: DEBUG nova.compute.utils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Using /dev/sd instead of None {{(pid=68233) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1543.434636] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Allocating IP information in the background. {{(pid=68233) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1543.434815] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] allocate_for_instance() {{(pid=68233) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.486810] env[68233]: DEBUG nova.policy [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18e896b81a32439ebc3db2aacd4d8967', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eb98b234b87a4120ad06095426f74ce0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68233) authorize /opt/stack/nova/nova/policy.py:192}} [ 1543.730178] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Successfully created port: d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.940019] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Start building block device mappings for instance. 
{{(pid=68233) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1544.948523] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Start spawning the instance on the hypervisor. {{(pid=68233) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1544.973663] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-06T03:47:24Z,direct_url=,disk_format='vmdk',id=da133fda-e1e2-42a1-a7e0-b8b1426a8490,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b18dbcbda07a49409f8351bc3bf7427c',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-06T03:47:25Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1544.973906] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.974074] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1544.974265] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.974413] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1544.974637] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1544.974826] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 1544.974976] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1544.975165] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1544.975331] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1544.975504] env[68233]: DEBUG nova.virt.hardware [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1544.976402] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba517b9-abf5-403d-9417-d493be820bbe {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.984248] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95b6229d-df82-4dfc-b54d-91a64cae6643 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.076377] env[68233]: DEBUG nova.compute.manager [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1545.076377] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1545.076377] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1545.076377] env[68233]: DEBUG oslo_concurrency.lockutils [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1545.076377] env[68233]: DEBUG 
nova.compute.manager [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] No waiting events found dispatching network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1545.076377] env[68233]: WARNING nova.compute.manager [req-6dc5ad70-5e7e-49b0-896f-edfb336a9c19 req-417f9d77-1932-4949-9e5c-158e66f096a3 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received unexpected event network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 for instance with vm_state building and task_state spawning. [ 1545.148166] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Successfully updated port: d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.651081] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.651243] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1545.651397] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1546.180666] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance cache missing network info. 
{{(pid=68233) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1546.292788] env[68233]: DEBUG nova.network.neutron [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.795385] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1546.795716] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance network_info: |[{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68233) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1546.796171] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:df:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3173b1c-b02e-4c91-90fc-b8fd76b7e954', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1546.803645] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1546.803853] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1546.804085] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-767bd8c7-662e-4f61-a427-8b505c603cb4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.824876] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1546.824876] env[68233]: value = "task-2783478" [ 1546.824876] env[68233]: _type = "Task" [ 1546.824876] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.832307] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783478, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.101726] env[68233]: DEBUG nova.compute.manager [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1547.101946] env[68233]: DEBUG nova.compute.manager [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing instance network info cache due to event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1547.102261] env[68233]: DEBUG oslo_concurrency.lockutils [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.102504] env[68233]: DEBUG oslo_concurrency.lockutils [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.102704] env[68233]: DEBUG nova.network.neutron [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1547.334899] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783478, 'name': CreateVM_Task, 'duration_secs': 0.323503} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.335263] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1547.335718] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.335886] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.336221] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1547.336466] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2591af1-50ef-4369-ae1e-7103cfd42ef8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.340810] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1547.340810] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5211fae0-f197-7bfb-1070-669a824c9cbb" [ 1547.340810] env[68233]: _type = "Task" [ 1547.340810] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.348054] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5211fae0-f197-7bfb-1070-669a824c9cbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.771235] env[68233]: DEBUG nova.network.neutron [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updated VIF entry in instance network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.771596] env[68233]: DEBUG nova.network.neutron [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.850697] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5211fae0-f197-7bfb-1070-669a824c9cbb, 'name': SearchDatastore_Task, 'duration_secs': 0.010918} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.851322] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1547.851322] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Processing image da133fda-e1e2-42a1-a7e0-b8b1426a8490 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1547.851467] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.851559] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.851711] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1547.851955] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1212a6ad-f45c-45fe-89ec-641d57eaca02 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.859676] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1547.859943] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1547.860657] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e89281a3-7098-46ca-b0a6-e1690837dc27 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.865201] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1547.865201] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d89e0e-f1af-95b5-82b4-54d22a98cd96" [ 1547.865201] env[68233]: _type = "Task" [ 1547.865201] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.872198] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d89e0e-f1af-95b5-82b4-54d22a98cd96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.115924] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1548.274568] env[68233]: DEBUG oslo_concurrency.lockutils [req-e2af3844-a945-41e4-b612-e38b9579b17e req-c05d392f-e764-4a60-9880-dde4ba444614 service nova] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1548.375771] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52d89e0e-f1af-95b5-82b4-54d22a98cd96, 'name': SearchDatastore_Task, 'duration_secs': 0.008124} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.376514] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4d1c42-d408-499b-8e23-62bd79fd1550 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.381211] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1548.381211] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521e2d3f-175b-459a-4736-c26cc5507cff" [ 1548.381211] env[68233]: _type = "Task" [ 1548.381211] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.388134] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521e2d3f-175b-459a-4736-c26cc5507cff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.891953] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]521e2d3f-175b-459a-4736-c26cc5507cff, 'name': SearchDatastore_Task, 'duration_secs': 0.009386} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.892238] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1548.892502] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1548.892753] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a12d792-c3ff-4f28-9157-35401c19c715 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.899994] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1548.899994] env[68233]: value = "task-2783479" [ 1548.899994] env[68233]: _type = "Task" [ 1548.899994] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.907468] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783479, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.409768] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783479, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.420345} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.410115] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/da133fda-e1e2-42a1-a7e0-b8b1426a8490/da133fda-e1e2-42a1-a7e0-b8b1426a8490.vmdk to [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1549.410263] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Extending root virtual disk to 1048576 {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1549.410435] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad2baaf5-1bab-4dfd-bc4f-68b4a2522e96 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.418075] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1549.418075] env[68233]: value = "task-2783480" [ 1549.418075] env[68233]: _type = "Task" [ 1549.418075] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.425999] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783480, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.928336] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783480, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070416} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.928486] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Extended root virtual disk {{(pid=68233) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.929314] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f952da6-9f63-45b2-9d66-a2881c024b4b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.950807] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1549.951054] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b212751a-8278-4ad2-bb1e-a62e2974e2db {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.970975] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1549.970975] env[68233]: value = "task-2783481" [ 1549.970975] env[68233]: _type = "Task" [ 1549.970975] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.979528] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783481, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.116392] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1550.480967] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783481, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.981734] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783481, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.115471] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.115760] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.115979] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1551.482658] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783481, 'name': ReconfigVM_Task, 'duration_secs': 1.282346} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.482992] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Reconfigured VM instance instance-00000080 to attach disk [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk or device None with type sparse {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1551.484308] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68f39042-1abf-4d48-b4ca-6915288553ff {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.490257] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1551.490257] env[68233]: value = "task-2783482" [ 1551.490257] env[68233]: _type = "Task" [ 1551.490257] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.497339] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783482, 'name': Rename_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.619678] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.619918] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.620102] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1551.620272] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1551.621189] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f4523a-0d00-4dff-b63f-f87cdcdf6249 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.630108] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6634eac-680d-46eb-9477-75486fb11156 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.644231] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7778f8ea-bbd9-4b62-8d74-3c67d06b51c8 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.650139] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968796e2-2f9f-44f1-b54c-7ec46faa89c5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.679087] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180923MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1551.679234] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1551.679436] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.999922] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783482, 'name': Rename_Task, 'duration_secs': 0.14569} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.000195] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1552.000446] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9b00843-27d6-4d0b-9ef7-568317a6e6fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.006304] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1552.006304] env[68233]: value = "task-2783483" [ 1552.006304] env[68233]: _type = "Task" [ 1552.006304] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.013366] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783483, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.516143] env[68233]: DEBUG oslo_vmware.api [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783483, 'name': PowerOnVM_Task, 'duration_secs': 0.454311} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.516464] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1552.516646] env[68233]: INFO nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Took 7.57 seconds to spawn the instance on the hypervisor. 
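Every vCenter operation in the spawn sequence above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) follows the same pattern: the SOAP call returns a task handle and oslo.vmware polls it until it reaches a terminal state, which is what produces the repeated "progress is N%" and "completed successfully ... duration_secs" entries. The following is a minimal illustrative sketch of such a polling loop, not the oslo.vmware implementation; the get_task_info callable is a hypothetical stand-in for whatever retrieves task state from vCenter.

import time

# Hypothetical terminal states, mirroring the vSphere task states seen in the log.
SUCCESS, ERROR = "success", "error"

def wait_for_task(task, get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it finishes, producing output analogous to
    the 'progress is N%' / 'completed successfully' lines above."""
    start = time.monotonic()
    while True:
        info = get_task_info(task)  # assumed helper: returns {'state': ..., 'progress': ..., ...}
        if info["state"] == SUCCESS:
            duration = time.monotonic() - start
            print(f"Task {task} completed successfully in {duration:.6f}s")
            return info.get("result")
        if info["state"] == ERROR:
            raise RuntimeError(f"Task {task} failed: {info.get('error')}")
        print(f"Task {task} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)

In the log the same loop shows up as the _poll_task entries spaced roughly half a second apart between each "Waiting for the task" line and its "completed successfully" line.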
[ 1552.516824] env[68233]: DEBUG nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1552.517576] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd889e0-6bfe-4a87-bc39-ac6c6b40e6ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.704237] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1552.704436] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1552.704579] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1552.730639] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28aa6e6d-5991-4159-b676-05a04078e805 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.738612] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8afb8b-2929-43a4-a85c-275e4ef7d358 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.768338] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ce14afb-af9b-4e7b-9e73-5e25d7fc6d0c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.775142] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa937d54-7338-4291-a6e4-2e6f23ed1ba4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.787722] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.036641] env[68233]: INFO nova.compute.manager [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Took 12.23 seconds to build instance. 
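The resource tracker numbers above, and the inventory reported to placement just below, can be cross-checked by hand: used_ram=704MB is the 512MB reserved host memory plus the 192MB m1.nano instance, and placement-style capacity is (total - reserved) scaled by the allocation ratio. A small illustrative sketch of that arithmetic, using the values from this log, follows; the helper names are hypothetical and this is not Nova or placement code.

# Inventory as reported for provider 51aa13e7-0977-4031-b209-4ae90c83752c in this log.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

# One m1.nano instance (1 vCPU, 192 MB RAM, 1 GB root disk) currently has allocations.
usage = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}

def capacity(inv):
    # Placement-style capacity: (total - reserved) scaled by the allocation ratio.
    return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

for rc, inv in inventory.items():
    cap = capacity(inv)
    print(f"{rc}: capacity={cap:.0f}, used={usage[rc]}, free={cap - usage[rc]:.0f}")

# The tracker's "Final resource view" adds the reserved memory to instance usage:
used_ram_mb = inventory["MEMORY_MB"]["reserved"] + usage["MEMORY_MB"]
print("used_ram =", used_ram_mb, "MB")  # 704 MB, matching the log line above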
[ 1553.290646] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1553.539044] env[68233]: DEBUG oslo_concurrency.lockutils [None req-3c8d84e5-d576-469a-9de3-6980960abc1b tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.737s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1553.795624] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1553.795836] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1553.848467] env[68233]: DEBUG nova.compute.manager [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1553.848679] env[68233]: DEBUG nova.compute.manager [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing instance network info cache due to event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1553.848932] env[68233]: DEBUG oslo_concurrency.lockutils [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.849155] env[68233]: DEBUG oslo_concurrency.lockutils [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1553.849365] env[68233]: DEBUG nova.network.neutron [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1554.545143] env[68233]: DEBUG nova.network.neutron [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updated VIF entry in instance network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954. {{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.545528] env[68233]: DEBUG nova.network.neutron [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.796819] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.796819] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task 
ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1554.796819] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1555.048799] env[68233]: DEBUG oslo_concurrency.lockutils [req-90732bed-1035-43f6-93ad-619240aa2b62 req-706bcc9b-e23a-4ce7-a824-b9a115eb7072 service nova] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1557.111789] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1590.656305] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1590.656681] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1590.656814] env[68233]: INFO nova.compute.manager [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Shelving [ 1591.666259] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1591.666592] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0a93194-5c15-409d-9d98-1407062401be {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.674627] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1591.674627] env[68233]: value = "task-2783484" [ 1591.674627] env[68233]: _type = "Task" [ 1591.674627] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.682296] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.184639] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783484, 'name': PowerOffVM_Task, 'duration_secs': 0.170745} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.184930] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1592.185787] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6c714e-9fcb-4496-ba66-84a2bca6c7d7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.203568] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff120a8-1a94-4e83-9e14-2c3ad8d9a316 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.713316] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Creating Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1592.713682] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7c97a5d3-d838-4984-9939-13d4cde4a271 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.721223] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1592.721223] env[68233]: value = "task-2783485" [ 1592.721223] env[68233]: _type = "Task" [ 1592.721223] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.729077] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783485, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.231448] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783485, 'name': CreateSnapshot_Task, 'duration_secs': 0.402537} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.231711] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Created Snapshot of the VM instance {{(pid=68233) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1593.232448] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492f5a90-cf07-410e-8c32-2be5f45f299c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.750053] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Creating linked-clone VM from snapshot {{(pid=68233) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1593.750053] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bfd379a0-ddc0-44ef-9bb5-2901571d491a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.758823] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1593.758823] env[68233]: value = "task-2783486" [ 1593.758823] env[68233]: _type = "Task" [ 1593.758823] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.766490] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783486, 'name': CloneVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.268203] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783486, 'name': CloneVM_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.770725] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783486, 'name': CloneVM_Task, 'duration_secs': 0.934079} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.771086] env[68233]: INFO nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Created linked-clone VM from snapshot [ 1594.771732] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6aa00e-d42c-4df7-9435-2431d9680e88 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.778419] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Uploading image b987d1c1-feb5-4660-9d37-51b9716268e2 {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1594.798554] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1594.798554] env[68233]: value = "vm-559566" [ 1594.798554] env[68233]: _type = "VirtualMachine" [ 1594.798554] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1594.798774] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f24ef5ba-d6eb-4f7a-9bbc-d44608c75fca {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.804692] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease: (returnval){ [ 1594.804692] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526235c6-c924-cbf7-a9bb-dfe50302bbc8" [ 1594.804692] env[68233]: _type = "HttpNfcLease" [ 1594.804692] env[68233]: } obtained for exporting VM: (result){ [ 1594.804692] env[68233]: value = "vm-559566" [ 1594.804692] env[68233]: _type = "VirtualMachine" [ 1594.804692] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1594.804944] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the lease: (returnval){ [ 1594.804944] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526235c6-c924-cbf7-a9bb-dfe50302bbc8" [ 1594.804944] env[68233]: _type = "HttpNfcLease" [ 1594.804944] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1594.810467] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1594.810467] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526235c6-c924-cbf7-a9bb-dfe50302bbc8" [ 1594.810467] env[68233]: _type = "HttpNfcLease" [ 1594.810467] env[68233]: } is initializing. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1595.313391] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1595.313391] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526235c6-c924-cbf7-a9bb-dfe50302bbc8" [ 1595.313391] env[68233]: _type = "HttpNfcLease" [ 1595.313391] env[68233]: } is ready. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1595.313688] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1595.313688] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]526235c6-c924-cbf7-a9bb-dfe50302bbc8" [ 1595.313688] env[68233]: _type = "HttpNfcLease" [ 1595.313688] env[68233]: }. {{(pid=68233) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1595.314367] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffdac0d-528c-440b-acd1-2f10a55f33d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.321524] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1595.321686] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk for reading. {{(pid=68233) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1595.409032] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d096de3f-5402-42ee-8090-54a095388068 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.456588] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1603.457594] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3be22d0-98ea-408d-ba08-f9ed0e7a40d6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.463570] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk is in state: ready. 
{{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1603.463735] env[68233]: ERROR oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk due to incomplete transfer. [ 1603.463947] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4ab161a3-42d8-42a7-b7b2-1e3ce33c326f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.471398] env[68233]: DEBUG oslo_vmware.rw_handles [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52dd968f-aa9d-a250-e716-f8f1d2f29b4c/disk-0.vmdk. {{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1603.471594] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Uploaded image b987d1c1-feb5-4660-9d37-51b9716268e2 to the Glance image server {{(pid=68233) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1603.473996] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Destroying the VM {{(pid=68233) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1603.474225] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b079f66d-80c1-4763-ab9d-fdbc5f92b4fd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.479669] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1603.479669] env[68233]: value = "task-2783488" [ 1603.479669] env[68233]: _type = "Task" [ 1603.479669] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.487079] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783488, 'name': Destroy_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.989619] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783488, 'name': Destroy_Task, 'duration_secs': 0.360406} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.989877] env[68233]: INFO nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Destroyed the VM [ 1603.990145] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleting Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1603.990386] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-719a9c95-7ebe-4be1-a51f-02785e612189 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.995842] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1603.995842] env[68233]: value = "task-2783489" [ 1603.995842] env[68233]: _type = "Task" [ 1603.995842] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.003060] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783489, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.505065] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783489, 'name': RemoveSnapshot_Task, 'duration_secs': 0.330532} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.505405] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleted Snapshot of the VM instance {{(pid=68233) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1604.505602] env[68233]: DEBUG nova.compute.manager [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.506356] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2279bdf1-365a-4cd4-bbc7-6ae0ffedceeb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.018416] env[68233]: INFO nova.compute.manager [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Shelve offloading [ 1605.522436] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.522800] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f62cc0-8b26-41de-b7ca-e99e224cca1e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.530706] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1605.530706] env[68233]: value = "task-2783490" [ 1605.530706] env[68233]: _type = "Task" [ 1605.530706] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.538213] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783490, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.041571] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] VM already powered off {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1606.041837] env[68233]: DEBUG nova.compute.manager [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1606.042615] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a557010a-8441-486a-a9f1-21414ff7b6fa {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.048061] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.048267] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1606.048497] env[68233]: DEBUG nova.network.neutron [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.739305] env[68233]: DEBUG nova.network.neutron [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", 
"ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.243052] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1607.434124] env[68233]: DEBUG nova.compute.manager [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-vif-unplugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1607.434488] env[68233]: DEBUG oslo_concurrency.lockutils [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1607.434828] env[68233]: DEBUG oslo_concurrency.lockutils [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1607.435081] env[68233]: DEBUG oslo_concurrency.lockutils [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1607.435345] env[68233]: DEBUG nova.compute.manager [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] No waiting events found dispatching network-vif-unplugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1607.435626] env[68233]: WARNING nova.compute.manager [req-5894899e-d3b0-485b-a0f7-9ef2f5b947f2 req-7cc76d5e-9123-4508-a4f5-7ee053e2d2f0 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received unexpected event network-vif-unplugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1607.526100] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1607.527011] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061f8d26-678d-4ca2-b264-3484a26dc430 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.534181] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1607.534403] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3dc65a9-4fd3-45d3-a875-26403049a8ac {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.603353] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1607.603565] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1607.603740] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleting the datastore file [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1607.603989] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abcb08ab-c677-4176-a48f-0e5c07d5d1f6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.609811] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1607.609811] env[68233]: value = "task-2783492" [ 1607.609811] env[68233]: _type = "Task" [ 1607.609811] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.616809] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783492, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.120282] env[68233]: DEBUG oslo_vmware.api [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136298} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.120585] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1608.120708] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1608.120897] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1608.145141] env[68233]: INFO nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted allocations for instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a [ 1608.650143] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1608.650456] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1608.650673] env[68233]: DEBUG nova.objects.instance [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'resources' on Instance uuid fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.115609] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1609.153564] env[68233]: DEBUG nova.objects.instance [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 
'numa_topology' on Instance uuid fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1609.466046] env[68233]: DEBUG nova.compute.manager [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1609.466237] env[68233]: DEBUG nova.compute.manager [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing instance network info cache due to event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954. {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1609.466594] env[68233]: DEBUG oslo_concurrency.lockutils [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.466594] env[68233]: DEBUG oslo_concurrency.lockutils [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1609.466738] env[68233]: DEBUG nova.network.neutron [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1609.656382] env[68233]: DEBUG nova.objects.base [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68233) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1609.672804] env[68233]: DEBUG nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Refreshing inventories for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1609.685767] env[68233]: DEBUG nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Updating ProviderTree inventory for provider 51aa13e7-0977-4031-b209-4ae90c83752c from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1609.685989] env[68233]: DEBUG nova.compute.provider_tree [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 
tempest-ServerActionsTestOtherB-606437413-project-member] Updating inventory in ProviderTree for provider 51aa13e7-0977-4031-b209-4ae90c83752c with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1609.699054] env[68233]: DEBUG nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Refreshing aggregate associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, aggregates: None {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1609.717330] env[68233]: DEBUG nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Refreshing trait associations for resource provider 51aa13e7-0977-4031-b209-4ae90c83752c, traits: COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_ISO {{(pid=68233) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1609.741880] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0431027a-9077-4f71-b5b6-c0488c5df79e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.749847] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d26a309-0c8b-4a31-857c-4cbfc4ddb657 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.483265] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c68b585-fb5d-400d-a9d0-d1ad67057a80 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.493895] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24073c70-e207-4fb0-8945-be1f82370917 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.507554] env[68233]: DEBUG nova.compute.provider_tree [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.884752] env[68233]: DEBUG nova.network.neutron [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updated VIF entry in instance network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1610.885117] env[68233]: DEBUG nova.network.neutron [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.987608] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1611.010173] env[68233]: DEBUG nova.scheduler.client.report [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1611.116217] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.116400] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1611.388328] env[68233]: DEBUG oslo_concurrency.lockutils [req-4c565813-68d0-4471-87fb-1ac001b69bcf req-c359dfe4-d810-4b7f-ab1e-786a48ca816f service nova] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1611.514589] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.864s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1612.022868] env[68233]: DEBUG oslo_concurrency.lockutils [None req-79a4df85-3c76-4d5e-8899-9d3a0c305286 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.366s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1612.023578] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.036s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1612.023763] env[68233]: INFO nova.compute.manager [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Unshelving [ 1612.115946] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1612.116121] env[68233]: DEBUG nova.compute.manager [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68233) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11072}} [ 1613.046698] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1613.046960] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1613.047189] env[68233]: DEBUG nova.objects.instance [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'pci_requests' on Instance uuid fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.115249] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.115443] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1613.552036] env[68233]: DEBUG nova.objects.instance [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'numa_topology' on Instance uuid fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.618827] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1614.054297] env[68233]: INFO nova.compute.claims [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1615.089686] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e298f1-01f7-4182-b22b-19a6f8c3342b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.097571] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa45252-2d63-4602-8a06-e6a4b65fa406 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.126150] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0141802f-3f8d-4c61-b96d-bc46c4beda56 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.132741] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d34e0da5-d012-46bb-b1a0-62da332ad087 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.145183] env[68233]: DEBUG nova.compute.provider_tree [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.648773] env[68233]: DEBUG nova.scheduler.client.report [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1616.153730] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1616.155990] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.537s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1616.156187] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1616.156343] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68233) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1616.157193] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b40cc7-5bb8-4a1a-92c6-47b4fc2ea7ef {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.165353] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2266488-8c5d-4a07-a9c4-267a6b81b8f1 {{(pid=68233) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.178816] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b204b0f4-08ee-4284-8a04-72d961c48e40 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.184781] env[68233]: INFO nova.network.neutron [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating port d3173b1c-b02e-4c91-90fc-b8fd76b7e954 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1616.187051] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f4af54-5143-497d-91a0-7743e5eccb93 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.215463] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181092MB free_disk=176GB free_vcpus=48 pci_devices=None {{(pid=68233) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1616.215656] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1616.215823] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1617.236345] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68233) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1617.236599] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1617.236749] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=68233) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1617.261151] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3e7a03-65d9-4a43-b0f4-6374f89302eb {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.268371] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6682be44-1582-4e67-986e-1f34ae9be9de {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.298107] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f497b15d-fdbe-4e4a-bebf-4f2232a35c4e {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.304706] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44df40ea-6108-46d9-8db6-7560d996ea8b {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.317192] env[68233]: DEBUG nova.compute.provider_tree [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1617.532424] env[68233]: DEBUG nova.compute.manager [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1617.533030] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1617.533188] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1617.533358] env[68233]: DEBUG oslo_concurrency.lockutils [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1617.533528] env[68233]: DEBUG nova.compute.manager [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] No waiting events found dispatching network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1617.533690] env[68233]: WARNING nova.compute.manager [req-6c873bb4-8996-422e-bd76-521423cc099f req-bf79787c-b429-4709-ac9d-68f69a551fc2 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received unexpected event network-vif-plugged-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 for instance with vm_state shelved_offloaded and task_state spawning. [ 1617.613801] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.613971] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1617.614144] env[68233]: DEBUG nova.network.neutron [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Building network info cache for instance {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1617.819964] env[68233]: DEBUG nova.scheduler.client.report [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1618.300724] env[68233]: DEBUG nova.network.neutron [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.324598] env[68233]: DEBUG nova.compute.resource_tracker [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68233) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1618.324793] env[68233]: DEBUG oslo_concurrency.lockutils [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.109s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1618.803680] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1618.830480] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-06T03:47:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='483fad11ff5c42793d749893e291e94c',container_format='bare',created_at=2025-03-06T04:06:24Z,direct_url=,disk_format='vmdk',id=b987d1c1-feb5-4660-9d37-51b9716268e2,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1411183317-shelved',owner='eb98b234b87a4120ad06095426f74ce0',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-03-06T04:06:38Z,virtual_size=,visibility=), allow threads: False {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1618.830728] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1618.830946] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image limits 0:0:0 {{(pid=68233) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1618.831194] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Flavor pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1618.831409] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Image pref 0:0:0 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1618.831586] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68233) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1618.831806] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1618.831965] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1618.832155] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Got 1 possible topologies {{(pid=68233) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1618.832330] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1618.833346] env[68233]: DEBUG nova.virt.hardware [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68233) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1618.833422] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea3ab6d-a51a-4f8f-86a2-8ed420e511bc {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.841958] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d31a72-f8c3-416b-8d52-4787138c585a {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.856009] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] 
[instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:df:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e6f11c0d-c73a-47f5-b02e-47bff48da0e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3173b1c-b02e-4c91-90fc-b8fd76b7e954', 'vif_model': 'vmxnet3'}] {{(pid=68233) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1618.863237] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1618.863457] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Creating VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1618.863726] env[68233]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ddea375-d699-4d8b-95e9-ec6288758396 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.882241] env[68233]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1618.882241] env[68233]: value = "task-2783493" [ 1618.882241] env[68233]: _type = "Task" [ 1618.882241] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.889139] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783493, 'name': CreateVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.392733] env[68233]: DEBUG oslo_vmware.api [-] Task: {'id': task-2783493, 'name': CreateVM_Task, 'duration_secs': 0.288794} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.393044] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Created VM on the ESX host {{(pid=68233) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1619.393574] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.393741] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1619.394183] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1619.394433] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d24d708c-0437-4480-be12-b946ca77f358 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.398586] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1619.398586] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52079441-6452-44d9-232c-88ce07ea814f" [ 1619.398586] env[68233]: _type = "Task" [ 1619.398586] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.408598] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52079441-6452-44d9-232c-88ce07ea814f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.557830] env[68233]: DEBUG nova.compute.manager [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}} [ 1619.558040] env[68233]: DEBUG nova.compute.manager [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing instance network info cache due to event network-changed-d3173b1c-b02e-4c91-90fc-b8fd76b7e954. 
{{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11661}} [ 1619.558252] env[68233]: DEBUG oslo_concurrency.lockutils [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] Acquiring lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.558396] env[68233]: DEBUG oslo_concurrency.lockutils [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] Acquired lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1619.558583] env[68233]: DEBUG nova.network.neutron [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Refreshing network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1619.908252] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1619.908528] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Processing image b987d1c1-feb5-4660-9d37-51b9716268e2 {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1619.908776] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.908932] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquired lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1619.909131] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1619.909372] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9618a74b-b18f-42d3-84c9-77693b761395 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.917447] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 
tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1619.917621] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68233) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1619.918289] env[68233]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2476f50-1be8-47c7-8325-4ad565f72ac2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.922913] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1619.922913] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5249d3b4-8cba-6eb0-8922-1116fc227f00" [ 1619.922913] env[68233]: _type = "Task" [ 1619.922913] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.931661] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]5249d3b4-8cba-6eb0-8922-1116fc227f00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.286029] env[68233]: DEBUG nova.network.neutron [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updated VIF entry in instance network info cache for port d3173b1c-b02e-4c91-90fc-b8fd76b7e954. 
{{(pid=68233) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1620.286407] env[68233]: DEBUG nova.network.neutron [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [{"id": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "address": "fa:16:3e:6e:df:89", "network": {"id": "86e47004-cc5e-4194-b502-1442d99ac9c8", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1891363394-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.183", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eb98b234b87a4120ad06095426f74ce0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e6f11c0d-c73a-47f5-b02e-47bff48da0e4", "external-id": "nsx-vlan-transportzone-345", "segmentation_id": 345, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3173b1c-b0", "ovs_interfaceid": "d3173b1c-b02e-4c91-90fc-b8fd76b7e954", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.433912] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Preparing fetch location {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1620.434259] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Fetch image to [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e/OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e.vmdk {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1620.434307] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Downloading stream optimized image b987d1c1-feb5-4660-9d37-51b9716268e2 to [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e/OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e.vmdk on the data store datastore2 as vApp {{(pid=68233) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1620.434579] env[68233]: DEBUG nova.virt.vmwareapi.images [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Downloading image file data b987d1c1-feb5-4660-9d37-51b9716268e2 to the ESX as VM named 'OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e' {{(pid=68233) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1620.498431] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1620.498431] env[68233]: value = "resgroup-9" [ 1620.498431] env[68233]: _type = "ResourcePool" [ 1620.498431] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1620.498738] env[68233]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d103524e-fd8b-42c6-8979-a348db3bfac7 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.518408] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease: (returnval){ [ 1620.518408] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad6cd0-2d59-5edc-f5f6-d712f6d974d0" [ 1620.518408] env[68233]: _type = "HttpNfcLease" [ 1620.518408] env[68233]: } obtained for vApp import into resource pool (val){ [ 1620.518408] env[68233]: value = "resgroup-9" [ 1620.518408] env[68233]: _type = "ResourcePool" [ 1620.518408] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1620.518679] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the lease: (returnval){ [ 1620.518679] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad6cd0-2d59-5edc-f5f6-d712f6d974d0" [ 1620.518679] env[68233]: _type = "HttpNfcLease" [ 1620.518679] env[68233]: } to be ready. {{(pid=68233) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1620.524439] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1620.524439] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad6cd0-2d59-5edc-f5f6-d712f6d974d0" [ 1620.524439] env[68233]: _type = "HttpNfcLease" [ 1620.524439] env[68233]: } is initializing. {{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1620.789547] env[68233]: DEBUG oslo_concurrency.lockutils [req-cdbfc5b6-fb37-44c5-a43e-3190d1badd0c req-b6c3ecab-91a9-4a9f-b0f6-63de815caa7f service nova] Releasing lock "refresh_cache-fa192f7d-76e7-4cab-b32e-af8ef3c5839a" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1621.026894] env[68233]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1621.026894] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad6cd0-2d59-5edc-f5f6-d712f6d974d0" [ 1621.026894] env[68233]: _type = "HttpNfcLease" [ 1621.026894] env[68233]: } is ready. 
{{(pid=68233) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1621.027201] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1621.027201] env[68233]: value = "session[52ba27fc-b14c-b89b-abf6-6ce7ce547d4d]52ad6cd0-2d59-5edc-f5f6-d712f6d974d0" [ 1621.027201] env[68233]: _type = "HttpNfcLease" [ 1621.027201] env[68233]: }. {{(pid=68233) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1621.028798] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f0048b-737d-4d19-98f3-0ececdde19b6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.035217] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk from lease info. {{(pid=68233) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1621.035393] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk. {{(pid=68233) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1621.097464] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4e97470c-425e-451b-bb89-5a0e12ccebce {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.325544] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1621.326872] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1621.833087] env[68233]: DEBUG oslo_service.periodic_task [None req-d27c2fd2-7b81-4284-aaef-b47dc036c2a6 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68233) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1622.078031] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Completed reading data from the image iterator. 
{{(pid=68233) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1622.078308] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1622.079181] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93143fb4-9dfa-417e-a2e3-ebd915ad9360 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.086187] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk is in state: ready. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1622.086350] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk. {{(pid=68233) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1622.086643] env[68233]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-835834c7-e174-47d0-970b-7dc19617d4b2 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.281986] env[68233]: DEBUG oslo_vmware.rw_handles [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5258dfb4-bed8-922e-1876-4aa23bb20ad8/disk-0.vmdk. 
{{(pid=68233) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1622.282221] env[68233]: INFO nova.virt.vmwareapi.images [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Downloaded image file data b987d1c1-feb5-4660-9d37-51b9716268e2 [ 1622.283060] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d02170-a4ff-4afb-a201-3a391ebea446 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.298422] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f45c8fa-2bb8-4d0b-96f9-614ba197a6ed {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.322304] env[68233]: INFO nova.virt.vmwareapi.images [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] The imported VM was unregistered [ 1622.324694] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Caching image {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1622.324941] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Creating directory with path [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.325194] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e06d3fa-4c9f-4148-a74a-b5983bd39b16 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.334636] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Created directory with path [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2 {{(pid=68233) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.334815] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e/OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e.vmdk to [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk. 
{{(pid=68233) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1622.335055] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-c98ee7d8-84ef-4407-9604-4409c4361716 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.341101] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1622.341101] env[68233]: value = "task-2783496" [ 1622.341101] env[68233]: _type = "Task" [ 1622.341101] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.348142] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.850907] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.351869] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.853761] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.353660] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.855230] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783496, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.368862} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.855596] env[68233]: INFO nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e/OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e.vmdk to [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk. 
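The block of MoveVirtualDisk_Task entries above (task-2783496, 0% → 21% → 46% → 69% → 94% → completed in ~2.37 s) follows the same shape as every other vCenter task in this log: invoke the *_Task API call, then poll the task object until it reports success, after which the disk sits under the per-image cache directory so later boots of image b987d1c1-feb5-4660-9d37-51b9716268e2 can reuse it instead of re-downloading. The sketch below is a minimal, stdlib-only illustration of that poll loop and of how the cache path is composed; `poll`, `wait_for_task` and `cached_image_path` are hypothetical names introduced here, not the oslo.vmware or Nova implementations.

```python
# Simplified illustration only -- not the oslo.vmware implementation.
# Pattern mirrored from the log: invoke a vCenter *_Task method (here
# MoveVirtualDisk_Task), poll its progress until it reports success, and
# place the disk under
# [datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk.
import time


def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Poll `poll()` until it reports success or error.

    `poll` is a hypothetical callable returning (state, progress),
    standing in for a property read of the vCenter Task object.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = poll()
        print(f"progress is {progress}%")   # mirrors the DEBUG lines above
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


def cached_image_path(datastore, image_id):
    # e.g. "[datastore2] devstack-image-cache_base/<id>/<id>.vmdk"
    return f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"


if __name__ == "__main__":
    steps = iter([("running", 21), ("running", 69), ("success", 100)])
    wait_for_task(lambda: next(steps), interval=0.0)
    print(cached_image_path("datastore2", "b987d1c1-feb5-4660-9d37-51b9716268e2"))
```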
[ 1624.855691] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Cleaning up location [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e {{(pid=68233) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1624.855840] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_73bbbaf4-d7eb-4fb1-911c-621b44b9f51e {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.856107] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15f10e8d-3869-4408-b9ad-ac4393a5ece4 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.862098] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1624.862098] env[68233]: value = "task-2783497" [ 1624.862098] env[68233]: _type = "Task" [ 1624.862098] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.869292] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783497, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.371732] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.03862} completed successfully. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.372064] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.372178] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Releasing lock "[datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk" {{(pid=68233) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1625.372426] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk to [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.372670] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10652e60-0a49-4fb1-b824-e769d6178b0d {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.379571] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1625.379571] env[68233]: value = "task-2783498" [ 1625.379571] env[68233]: _type = "Task" [ 1625.379571] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.386335] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.890761] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.389988] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task} progress is 49%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.891355] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.392532] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.894761] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783498, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.094766} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.895175] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/b987d1c1-feb5-4660-9d37-51b9716268e2/b987d1c1-feb5-4660-9d37-51b9716268e2.vmdk to [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk {{(pid=68233) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1627.895758] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2310c34c-c85b-41ce-bb0a-dc87e95650bf {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.916467] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1627.916725] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-176cf586-691c-4a89-bce8-04b4aa60d4ab {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.934814] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1627.934814] env[68233]: value = "task-2783499" [ 1627.934814] env[68233]: _type = "Task" [ 1627.934814] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.942478] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783499, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.444758] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783499, 'name': ReconfigVM_Task, 'duration_secs': 0.261608} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.445037] env[68233]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Reconfigured VM instance instance-00000080 to attach disk [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a/fa192f7d-76e7-4cab-b32e-af8ef3c5839a.vmdk or device None with type streamOptimized {{(pid=68233) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.445642] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9ef03f9e-b348-48ef-b754-169bad4db827 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.451186] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1628.451186] env[68233]: value = "task-2783500" [ 1628.451186] env[68233]: _type = "Task" [ 1628.451186] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.458183] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783500, 'name': Rename_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.961467] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783500, 'name': Rename_Task, 'duration_secs': 0.142452} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.961824] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powering on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1628.961951] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-406ed26d-f3be-4c9d-a028-7322411b683f {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.967911] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1628.967911] env[68233]: value = "task-2783501" [ 1628.967911] env[68233]: _type = "Task" [ 1628.967911] env[68233]: } to complete. 
{{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.974966] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783501, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.477544] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783501, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.979862] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783501, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.479891] env[68233]: DEBUG oslo_vmware.api [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783501, 'name': PowerOnVM_Task, 'duration_secs': 1.015032} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.480231] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powered on the VM {{(pid=68233) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.567960] env[68233]: DEBUG nova.compute.manager [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Checking state {{(pid=68233) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1630.568918] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfa2bf1-63ca-4a6c-a68f-e973c82e3db9 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.086245] env[68233]: DEBUG oslo_concurrency.lockutils [None req-8ee63e68-8cae-481a-a12c-a4f0657c2b04 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.063s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.965826] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1631.966074] env[68233]: DEBUG oslo_concurrency.lockutils [None 
req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1631.966304] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1631.966489] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1631.966662] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1631.968846] env[68233]: INFO nova.compute.manager [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Terminating instance [ 1632.473188] env[68233]: DEBUG nova.compute.manager [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Start destroying the instance on the hypervisor. 
{{(pid=68233) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1632.473604] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Destroying instance {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1632.474577] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f7ffec-44a6-4aaf-92e7-0e6aba862abd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.482524] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powering off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1632.482779] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67f2209e-36e6-40f0-ad18-dc7b9f0a6c8c {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.489931] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){ [ 1632.489931] env[68233]: value = "task-2783502" [ 1632.489931] env[68233]: _type = "Task" [ 1632.489931] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.497989] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.000421] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783502, 'name': PowerOffVM_Task, 'duration_secs': 0.197629} completed successfully. 
[ 1633.000421] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783502, 'name': PowerOffVM_Task, 'duration_secs': 0.197629} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1633.000685] env[68233]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Powered off the VM {{(pid=68233) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}}
[ 1633.000853] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Unregistering the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}}
[ 1633.001108] env[68233]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf53becd-7556-4c65-948f-c285c5a57956 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.066306] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Unregistered the VM {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}}
[ 1633.066529] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleting contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}}
[ 1633.066759] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleting the datastore file [datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}}
[ 1633.067057] env[68233]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aab88b03-ad01-4143-8653-cf110cc37f14 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1633.072535] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for the task: (returnval){
[ 1633.072535] env[68233]: value = "task-2783504"
[ 1633.072535] env[68233]: _type = "Task"
[ 1633.072535] env[68233]: } to complete. {{(pid=68233) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}}
[ 1633.080012] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}}
[ 1633.582190] env[68233]: DEBUG oslo_vmware.api [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Task: {'id': task-2783504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130728} completed successfully. {{(pid=68233) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}}
[ 1633.582558] env[68233]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted the datastore file {{(pid=68233) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}}
[ 1633.582607] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deleted contents of the VM from datastore datastore2 {{(pid=68233) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}}
[ 1633.582779] env[68233]: DEBUG nova.virt.vmwareapi.vmops [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Instance destroyed {{(pid=68233) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}}
[ 1633.582953] env[68233]: INFO nova.compute.manager [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Took 1.11 seconds to destroy the instance on the hypervisor.
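Once the VM is powered off, the destroy path above unregisters it from vCenter and then removes its directory from datastore2 via a FileManager task. A rough sketch of those two calls, again assuming a pre-existing oslo.vmware `session`, a `vm_ref`, and a datacenter moref `dc_ref` (all hypothetical); the exact keyword arguments may differ from what the vmops/ds_util helpers pass:

    # Sketch only: `session`, `vm_ref`, `dc_ref` and `ds_path` are assumed inputs.
    def unregister_and_delete(session, vm_ref, dc_ref, ds_path):
        # UnregisterVM removes the VM from vCenter's inventory but leaves its
        # files on the datastore (cf. "Unregistered the VM" above).
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # DeleteDatastoreFile_Task then removes the instance directory, e.g.
        # ds_path = '[datastore2] fa192f7d-76e7-4cab-b32e-af8ef3c5839a'.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)  # cf. task-2783504 above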
[ 1633.583224] env[68233]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68233) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}}
[ 1633.583410] env[68233]: DEBUG nova.compute.manager [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Deallocating network for instance {{(pid=68233) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}}
[ 1633.583509] env[68233]: DEBUG nova.network.neutron [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] deallocate_for_instance() {{(pid=68233) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}}
[ 1634.035286] env[68233]: DEBUG nova.compute.manager [req-acd7d9bf-17a0-4e42-b7db-fbda996cf782 req-0f24cc35-5b27-40a3-a97f-4ef5983b0701 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Received event network-vif-deleted-d3173b1c-b02e-4c91-90fc-b8fd76b7e954 {{(pid=68233) external_instance_event /opt/stack/nova/nova/compute/manager.py:11656}}
[ 1634.035550] env[68233]: INFO nova.compute.manager [req-acd7d9bf-17a0-4e42-b7db-fbda996cf782 req-0f24cc35-5b27-40a3-a97f-4ef5983b0701 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Neutron deleted interface d3173b1c-b02e-4c91-90fc-b8fd76b7e954; detaching it from the instance and deleting it from the info cache
[ 1634.035697] env[68233]: DEBUG nova.network.neutron [req-acd7d9bf-17a0-4e42-b7db-fbda996cf782 req-0f24cc35-5b27-40a3-a97f-4ef5983b0701 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1634.518340] env[68233]: DEBUG nova.network.neutron [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Updating instance_info_cache with network_info: [] {{(pid=68233) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1634.538499] env[68233]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caf502a7-7065-4b33-8ada-ea7c52c7a2a6 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1634.548563] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a715dc9a-d7b6-42da-b674-01b002671efd {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1634.572424] env[68233]: DEBUG nova.compute.manager [req-acd7d9bf-17a0-4e42-b7db-fbda996cf782 req-0f24cc35-5b27-40a3-a97f-4ef5983b0701 service nova] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Detach interface failed, port_id=d3173b1c-b02e-4c91-90fc-b8fd76b7e954, reason: Instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a could not be found. {{(pid=68233) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11490}}
[ 1635.021411] env[68233]: INFO nova.compute.manager [-] [instance: fa192f7d-76e7-4cab-b32e-af8ef3c5839a] Took 1.44 seconds to deallocate network for instance.
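The loopingcall record above shows network deallocation being driven through a retry wrapper before the Neutron port is cleaned up and the info cache emptied. A small sketch of that retry pattern using oslo.service's RetryDecorator; the exception class, retry counts and sleep times here are illustrative placeholders, not the values used in this deployment:

    from oslo_service import loopingcall

    class TransientNetworkError(Exception):
        """Hypothetical stand-in for transient errors worth retrying."""

    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=10,
                                exceptions=(TransientNetworkError,))
    def deallocate_network_with_retries(network_api, context, instance):
        # Each raised TransientNetworkError triggers another attempt after an
        # increasing sleep; any other exception (or exhausting the retries)
        # propagates to the caller.
        network_api.deallocate_for_instance(context, instance)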
[ 1635.528525] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 1635.528832] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 1635.529077] env[68233]: DEBUG nova.objects.instance [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lazy-loading 'resources' on Instance uuid fa192f7d-76e7-4cab-b32e-af8ef3c5839a {{(pid=68233) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 1636.062746] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59def9d8-82de-4ea7-9688-162ae3b5d097 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1636.071344] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cea0adc-dfd1-4440-836c-930414b67d17 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1636.101019] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1fb3953-a7bc-458d-b0aa-00f9f3daeab5 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1636.107746] env[68233]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9029580d-1a52-418e-9167-c9450ab25e95 {{(pid=68233) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1636.120468] env[68233]: DEBUG nova.compute.provider_tree [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed in ProviderTree for provider: 51aa13e7-0977-4031-b209-4ae90c83752c {{(pid=68233) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1636.623909] env[68233]: DEBUG nova.scheduler.client.report [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Inventory has not changed for provider 51aa13e7-0977-4031-b209-4ae90c83752c based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68233) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}}
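The provider_tree and report-client records above both conclude that the compute node's inventory is unchanged, so nothing needs to be sent to Placement. The decision itself is a structural comparison of per-resource-class dicts; a toy version using the inventory data copied from the log record (an illustration only, not the actual comparison code):

    # Inventory copied from the scheduler report client record above.
    reported = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 176, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    def inventory_changed(current, desired):
        """True if any resource class or any field within one differs."""
        return current != desired

    # Identical data means there is nothing to update in Placement, hence the
    # "Inventory has not changed for provider ..." message.
    assert not inventory_changed(reported, dict(reported))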
[ 1637.129700] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 1637.151191] env[68233]: INFO nova.scheduler.client.report [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Deleted allocations for instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a
[ 1637.659244] env[68233]: DEBUG oslo_concurrency.lockutils [None req-1ecef772-0cf5-4133-bc49-6a815a7c6b40 tempest-ServerActionsTestOtherB-606437413 tempest-ServerActionsTestOtherB-606437413-project-member] Lock "fa192f7d-76e7-4cab-b32e-af8ef3c5839a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.693s {{(pid=68233) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
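The whole termination is bracketed by the per-instance lock seen at the start of this excerpt (waited 0.001s) and in the final record (held 5.693s): state-changing operations on instance fa192f7d-76e7-4cab-b32e-af8ef3c5839a are serialized on its UUID. A minimal sketch of that pattern with oslo.concurrency; the decorator arguments and empty body are placeholders, not the compute manager's code:

    from oslo_concurrency import lockutils

    INSTANCE_UUID = 'fa192f7d-76e7-4cab-b32e-af8ef3c5839a'

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Runs with the per-instance semaphore held, so a concurrent delete,
        # reboot, etc. of the same instance waits (here roughly 5.7s) instead
        # of racing this teardown.
        ...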